Compare commits


No commits in common. "main" and "v0.3.3" have entirely different histories.
main ... v0.3.3

378 changed files with 10473 additions and 86872 deletions


@@ -11,10 +11,6 @@
**/.toolstarget
**/.vs
**/.vscode
**/.history
**/media
**/models
**/static
**/*.*proj.user
**/*.dbmdl
**/*.jfm
@@ -30,5 +26,3 @@
**/values.dev.yaml
LICENSE
README.md
data/
docker/data/


@@ -1,64 +0,0 @@
name: Bug Report
description: I have an issue with Dispatcharr
title: "[Bug]: "
labels: ["Triage"]
type: "Bug"
projects: []
assignees: []
body:
  - type: markdown
    attributes:
      value: |
        Please make sure you search for similar issues before submitting. Thank you for your bug report!
  - type: textarea
    id: describe-the-bug
    attributes:
      label: Describe the bug
      description: Make sure to attach screenshots if possible!
      placeholder: Tell us what you see!
      value: "A clear and concise description of what the bug is. What did you expect to happen?"
    validations:
      required: true
  - type: textarea
    id: reproduce
    attributes:
      label: How can we recreate this bug?
      description: Be detailed!
      placeholder: Tell us what you see!
      value: "1. Go to '...' 2. Click on '....' 3. Scroll down to '....' 4. See error"
    validations:
      required: true
  - type: input
    id: dispatcharr-version
    attributes:
      label: Dispatcharr Version
      description: What version of Dispatcharr are you running?
      placeholder: Located bottom left of main screen
    validations:
      required: true
  - type: input
    id: docker-version
    attributes:
      label: Docker Version
      description: What version of Docker are you running?
      placeholder: docker --version
    validations:
      required: true
  - type: textarea
    id: docker-compose
    attributes:
      label: What's in your Docker Compose file?
      description: Please share your docker-compose.yml file
      placeholder: Tell us what you see!
      value: "If not using Docker Compose, just put 'not using'."
    validations:
      required: true
  - type: textarea
    id: client-info
    attributes:
      label: Client Information
      description: What are you using to view the streams from Dispatcharr?
      placeholder: Tell us what you see!
      value: "Device, App, Versions for both, etc..."
    validations:
      required: true


@@ -1 +0,0 @@
blank_issues_enabled: false


@@ -1,39 +0,0 @@
name: Feature request
description: I want to suggest a new feature for Dispatcharr
title: "[Feature]: "
labels: ["Triage"]
type: "Feature"
projects: []
assignees: []
body:
  - type: markdown
    attributes:
      value: |
        Thank you for helping to make Dispatcharr better!
  - type: textarea
    id: describe-problem
    attributes:
      label: Is your feature request related to a problem?
      description: Make sure to attach screenshots if possible!
      placeholder: Tell us what you see!
      value: "A clear and concise description of what the problem is. Ex. I'm always frustrated when [...]"
    validations:
      required: true
  - type: textarea
    id: describe-solution
    attributes:
      label: Describe the solution you'd like
      description: A clear and concise description of what you want to happen.
      placeholder: Tell us what you see!
      value: "Describe here."
    validations:
      required: true
  - type: textarea
    id: extras
    attributes:
      label: Additional context
      description: Anything else you want to add?
      placeholder: Tell us what you see!
      value: "Nothing Extra"
    validations:
      required: true


@@ -1,250 +0,0 @@
name: Base Image Build
on:
  push:
    branches: [main, dev]
    paths:
      - 'docker/DispatcharrBase'
      - '.github/workflows/base-image.yml'
      - 'requirements.txt'
  pull_request:
    branches: [main, dev]
    paths:
      - 'docker/DispatcharrBase'
      - '.github/workflows/base-image.yml'
      - 'requirements.txt'
  workflow_dispatch: # Allow manual triggering
permissions:
  contents: write # For managing releases and pushing tags
  packages: write # For publishing to GitHub Container Registry
jobs:
  prepare:
    runs-on: ubuntu-24.04
    outputs:
      repo_owner: ${{ steps.meta.outputs.repo_owner }}
      repo_name: ${{ steps.meta.outputs.repo_name }}
      branch_tag: ${{ steps.meta.outputs.branch_tag }}
      timestamp: ${{ steps.timestamp.outputs.timestamp }}
    steps:
      - uses: actions/checkout@v3
        with:
          fetch-depth: 0
          token: ${{ secrets.GITHUB_TOKEN }}
      - name: Generate timestamp for build
        id: timestamp
        run: |
          TIMESTAMP=$(date -u +'%Y%m%d%H%M%S')
          echo "timestamp=${TIMESTAMP}" >> $GITHUB_OUTPUT
      - name: Set repository and image metadata
        id: meta
        run: |
          # Get lowercase repository owner
          REPO_OWNER=$(echo "${{ github.repository_owner }}" | tr '[:upper:]' '[:lower:]')
          echo "repo_owner=${REPO_OWNER}" >> $GITHUB_OUTPUT
          # Get repository name
          REPO_NAME=$(echo "${{ github.repository }}" | cut -d '/' -f 2 | tr '[:upper:]' '[:lower:]')
          echo "repo_name=${REPO_NAME}" >> $GITHUB_OUTPUT
          # Determine branch name
          if [[ "${{ github.ref }}" == "refs/heads/main" ]]; then
            echo "branch_tag=base" >> $GITHUB_OUTPUT
          elif [[ "${{ github.ref }}" == "refs/heads/dev" ]]; then
            echo "branch_tag=base-dev" >> $GITHUB_OUTPUT
          else
            # For other branches, use the branch name
            BRANCH=$(echo "${{ github.ref }}" | sed 's/refs\/heads\///' | sed 's/[^a-zA-Z0-9]/-/g')
            echo "branch_tag=base-${BRANCH}" >> $GITHUB_OUTPUT
          fi
  docker:
    needs: [prepare]
    strategy:
      fail-fast: false
      matrix:
        platform: [amd64, arm64]
        include:
          - platform: amd64
            runner: ubuntu-24.04
          - platform: arm64
            runner: ubuntu-24.04-arm
    runs-on: ${{ matrix.runner }}
    steps:
      - uses: actions/checkout@v3
        with:
          fetch-depth: 0
          token: ${{ secrets.GITHUB_TOKEN }}
      - name: Configure Git
        run: |
          git config user.name "GitHub Actions"
          git config user.email "actions@github.com"
      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v2
      - name: Login to GitHub Container Registry
        uses: docker/login-action@v2
        with:
          registry: ghcr.io
          username: ${{ github.actor }}
          password: ${{ secrets.GITHUB_TOKEN }}
      - name: Login to Docker Hub
        uses: docker/login-action@v2
        with:
          registry: docker.io
          username: ${{ secrets.DOCKERHUB_USERNAME }}
          password: ${{ secrets.DOCKERHUB_TOKEN }}
      - name: Extract metadata for Docker
        id: meta
        uses: docker/metadata-action@v5
        with:
          images: |
            ghcr.io/${{ needs.prepare.outputs.repo_owner }}/${{ needs.prepare.outputs.repo_name }}
            docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${{ needs.prepare.outputs.repo_name }}
          labels: |
            org.opencontainers.image.title=${{ needs.prepare.outputs.repo_name }}
            org.opencontainers.image.description=Your ultimate IPTV & stream Management companion.
            org.opencontainers.image.url=https://github.com/${{ github.repository }}
            org.opencontainers.image.source=https://github.com/${{ github.repository }}
            org.opencontainers.image.version=${{ needs.prepare.outputs.branch_tag }}-${{ needs.prepare.outputs.timestamp }}
            org.opencontainers.image.created=${{ needs.prepare.outputs.timestamp }}
            org.opencontainers.image.revision=${{ github.sha }}
            org.opencontainers.image.licenses=See repository
            org.opencontainers.image.documentation=https://dispatcharr.github.io/Dispatcharr-Docs/
            org.opencontainers.image.vendor=${{ needs.prepare.outputs.repo_owner }}
            org.opencontainers.image.authors=${{ github.actor }}
            maintainer=${{ github.actor }}
            build_version=DispatcharrBase version: ${{ needs.prepare.outputs.branch_tag }}-${{ needs.prepare.outputs.timestamp }}
      - name: Build and push Docker base image
        uses: docker/build-push-action@v4
        with:
          context: .
          file: ./docker/DispatcharrBase
          push: ${{ github.event_name != 'pull_request' }}
          platforms: linux/${{ matrix.platform }}
          tags: |
            ghcr.io/${{ needs.prepare.outputs.repo_owner }}/${{ needs.prepare.outputs.repo_name }}:${{ needs.prepare.outputs.branch_tag }}-${{ matrix.platform }}
            ghcr.io/${{ needs.prepare.outputs.repo_owner }}/${{ needs.prepare.outputs.repo_name }}:${{ needs.prepare.outputs.branch_tag }}-${{ needs.prepare.outputs.timestamp }}-${{ matrix.platform }}
            docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${{ needs.prepare.outputs.repo_name }}:${{ needs.prepare.outputs.branch_tag }}-${{ matrix.platform }}
            docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${{ needs.prepare.outputs.repo_name }}:${{ needs.prepare.outputs.branch_tag }}-${{ needs.prepare.outputs.timestamp }}-${{ matrix.platform }}
          labels: ${{ steps.meta.outputs.labels }}
          build-args: |
            REPO_OWNER=${{ needs.prepare.outputs.repo_owner }}
            REPO_NAME=${{ needs.prepare.outputs.repo_name }}
            BRANCH=${{ github.ref_name }}
            REPO_URL=https://github.com/${{ github.repository }}
            TIMESTAMP=${{ needs.prepare.outputs.timestamp }}
  create-manifest:
    needs: [prepare, docker]
    runs-on: ubuntu-24.04
    if: ${{ github.event_name != 'pull_request' }}
    steps:
      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v2
      - name: Login to GitHub Container Registry
        uses: docker/login-action@v2
        with:
          registry: ghcr.io
          username: ${{ github.actor }}
          password: ${{ secrets.GITHUB_TOKEN }}
      - name: Login to Docker Hub
        uses: docker/login-action@v2
        with:
          registry: docker.io
          username: ${{ secrets.DOCKERHUB_USERNAME }}
          password: ${{ secrets.DOCKERHUB_TOKEN }}
      - name: Create multi-arch manifest tags
        run: |
          set -euo pipefail
          OWNER=${{ needs.prepare.outputs.repo_owner }}
          REPO=${{ needs.prepare.outputs.repo_name }}
          BRANCH_TAG=${{ needs.prepare.outputs.branch_tag }}
          TIMESTAMP=${{ needs.prepare.outputs.timestamp }}
          echo "Creating multi-arch manifest for ${OWNER}/${REPO}"
          # GitHub Container Registry manifests
          # branch tag (e.g. base or base-dev)
          docker buildx imagetools create \
            --annotation "index:org.opencontainers.image.title=${{ needs.prepare.outputs.repo_name }}" \
            --annotation "index:org.opencontainers.image.description=Your ultimate IPTV & stream Management companion." \
            --annotation "index:org.opencontainers.image.url=https://github.com/${{ github.repository }}" \
            --annotation "index:org.opencontainers.image.source=https://github.com/${{ github.repository }}" \
            --annotation "index:org.opencontainers.image.version=${BRANCH_TAG}-${TIMESTAMP}" \
            --annotation "index:org.opencontainers.image.created=${TIMESTAMP}" \
            --annotation "index:org.opencontainers.image.revision=${{ github.sha }}" \
            --annotation "index:org.opencontainers.image.licenses=See repository" \
            --annotation "index:org.opencontainers.image.documentation=https://dispatcharr.github.io/Dispatcharr-Docs/" \
            --annotation "index:org.opencontainers.image.vendor=${OWNER}" \
            --annotation "index:org.opencontainers.image.authors=${{ github.actor }}" \
            --annotation "index:maintainer=${{ github.actor }}" \
            --annotation "index:build_version=DispatcharrBase version: ${BRANCH_TAG}-${TIMESTAMP}" \
            --tag ghcr.io/${OWNER}/${REPO}:${BRANCH_TAG} \
            ghcr.io/${OWNER}/${REPO}:${BRANCH_TAG}-amd64 ghcr.io/${OWNER}/${REPO}:${BRANCH_TAG}-arm64
          # branch + timestamp tag
          docker buildx imagetools create \
            --annotation "index:org.opencontainers.image.title=${{ needs.prepare.outputs.repo_name }}" \
            --annotation "index:org.opencontainers.image.description=Your ultimate IPTV & stream Management companion." \
            --annotation "index:org.opencontainers.image.url=https://github.com/${{ github.repository }}" \
            --annotation "index:org.opencontainers.image.source=https://github.com/${{ github.repository }}" \
            --annotation "index:org.opencontainers.image.version=${BRANCH_TAG}-${TIMESTAMP}" \
            --annotation "index:org.opencontainers.image.created=${TIMESTAMP}" \
            --annotation "index:org.opencontainers.image.revision=${{ github.sha }}" \
            --annotation "index:org.opencontainers.image.licenses=See repository" \
            --annotation "index:org.opencontainers.image.documentation=https://dispatcharr.github.io/Dispatcharr-Docs/" \
            --annotation "index:org.opencontainers.image.vendor=${OWNER}" \
            --annotation "index:org.opencontainers.image.authors=${{ github.actor }}" \
            --annotation "index:maintainer=${{ github.actor }}" \
            --annotation "index:build_version=DispatcharrBase version: ${BRANCH_TAG}-${TIMESTAMP}" \
            --tag ghcr.io/${OWNER}/${REPO}:${BRANCH_TAG}-${TIMESTAMP} \
            ghcr.io/${OWNER}/${REPO}:${BRANCH_TAG}-${TIMESTAMP}-amd64 ghcr.io/${OWNER}/${REPO}:${BRANCH_TAG}-${TIMESTAMP}-arm64
          # Docker Hub manifests
          # branch tag (e.g. base or base-dev)
          docker buildx imagetools create \
            --annotation "index:org.opencontainers.image.title=${{ needs.prepare.outputs.repo_name }}" \
            --annotation "index:org.opencontainers.image.description=Your ultimate IPTV & stream Management companion." \
            --annotation "index:org.opencontainers.image.url=https://github.com/${{ github.repository }}" \
            --annotation "index:org.opencontainers.image.source=https://github.com/${{ github.repository }}" \
            --annotation "index:org.opencontainers.image.version=${BRANCH_TAG}-${TIMESTAMP}" \
            --annotation "index:org.opencontainers.image.created=${TIMESTAMP}" \
            --annotation "index:org.opencontainers.image.revision=${{ github.sha }}" \
            --annotation "index:org.opencontainers.image.licenses=See repository" \
            --annotation "index:org.opencontainers.image.documentation=https://dispatcharr.github.io/Dispatcharr-Docs/" \
            --annotation "index:org.opencontainers.image.vendor=${OWNER}" \
            --annotation "index:org.opencontainers.image.authors=${{ github.actor }}" \
            --annotation "index:maintainer=${{ github.actor }}" \
            --annotation "index:build_version=DispatcharrBase version: ${BRANCH_TAG}-${TIMESTAMP}" \
            --tag docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${REPO}:${BRANCH_TAG} \
            docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${REPO}:${BRANCH_TAG}-amd64 docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${REPO}:${BRANCH_TAG}-arm64
          # branch + timestamp tag
          docker buildx imagetools create \
            --annotation "index:org.opencontainers.image.title=${{ needs.prepare.outputs.repo_name }}" \
            --annotation "index:org.opencontainers.image.description=Your ultimate IPTV & stream Management companion." \
            --annotation "index:org.opencontainers.image.url=https://github.com/${{ github.repository }}" \
            --annotation "index:org.opencontainers.image.source=https://github.com/${{ github.repository }}" \
            --annotation "index:org.opencontainers.image.version=${BRANCH_TAG}-${TIMESTAMP}" \
            --annotation "index:org.opencontainers.image.created=${TIMESTAMP}" \
            --annotation "index:org.opencontainers.image.revision=${{ github.sha }}" \
            --annotation "index:org.opencontainers.image.licenses=See repository" \
            --annotation "index:org.opencontainers.image.documentation=https://dispatcharr.github.io/Dispatcharr-Docs/" \
            --annotation "index:org.opencontainers.image.vendor=${OWNER}" \
            --annotation "index:org.opencontainers.image.authors=${{ github.actor }}" \
            --annotation "index:maintainer=${{ github.actor }}" \
            --annotation "index:build_version=DispatcharrBase version: ${BRANCH_TAG}-${TIMESTAMP}" \
            --tag docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${REPO}:${BRANCH_TAG}-${TIMESTAMP} \
            docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${REPO}:${BRANCH_TAG}-${TIMESTAMP}-amd64 docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${REPO}:${BRANCH_TAG}-${TIMESTAMP}-arm64


@@ -2,86 +2,18 @@ name: CI Pipeline
on:
push:
branches: [dev]
paths-ignore:
- '**.md'
branches: [ dev ]
pull_request:
branches: [dev]
workflow_dispatch:
branches: [ dev ]
# Add explicit permissions for the workflow
permissions:
contents: write
packages: write
contents: write # For managing releases and pushing tags
packages: write # For publishing to GitHub Container Registry
jobs:
prepare:
runs-on: ubuntu-24.04
# compute a single timestamp, version, and repo metadata for the entire workflow
outputs:
repo_owner: ${{ steps.meta.outputs.repo_owner }}
repo_name: ${{ steps.meta.outputs.repo_name }}
branch_tag: ${{ steps.meta.outputs.branch_tag }}
version: ${{ steps.version.outputs.version }}
timestamp: ${{ steps.timestamp.outputs.timestamp }}
steps:
- uses: actions/checkout@v3
with:
fetch-depth: 0
token: ${{ secrets.GITHUB_TOKEN }}
- name: Generate timestamp for build
id: timestamp
run: |
TIMESTAMP=$(date -u +'%Y%m%d%H%M%S')
echo "timestamp=${TIMESTAMP}" >> $GITHUB_OUTPUT
- name: Extract version info
id: version
run: |
VERSION=$(python -c "import version; print(version.__version__)")
echo "version=${VERSION}" >> $GITHUB_OUTPUT
- name: Set repository and image metadata
id: meta
run: |
REPO_OWNER=$(echo "${{ github.repository_owner }}" | tr '[:upper:]' '[:lower:]')
echo "repo_owner=${REPO_OWNER}" >> $GITHUB_OUTPUT
REPO_NAME=$(echo "${{ github.repository }}" | cut -d '/' -f 2 | tr '[:upper:]' '[:lower:]')
echo "repo_name=${REPO_NAME}" >> $GITHUB_OUTPUT
if [[ "${{ github.ref }}" == "refs/heads/main" ]]; then
echo "branch_tag=latest" >> $GITHUB_OUTPUT
echo "is_main=true" >> $GITHUB_OUTPUT
elif [[ "${{ github.ref }}" == "refs/heads/dev" ]]; then
echo "branch_tag=dev" >> $GITHUB_OUTPUT
echo "is_main=false" >> $GITHUB_OUTPUT
else
BRANCH=$(echo "${{ github.ref }}" | sed 's/refs\/heads\///' | sed 's/[^a-zA-Z0-9]/-/g')
echo "branch_tag=${BRANCH}" >> $GITHUB_OUTPUT
echo "is_main=false" >> $GITHUB_OUTPUT
fi
if [[ "${{ github.event.pull_request.head.repo.fork }}" == "true" ]]; then
echo "is_fork=true" >> $GITHUB_OUTPUT
else
echo "is_fork=false" >> $GITHUB_OUTPUT
fi
docker:
needs: [prepare]
strategy:
fail-fast: false
matrix:
platform: [amd64, arm64]
include:
- platform: amd64
runner: ubuntu-24.04
- platform: arm64
runner: ubuntu-24.04-arm
runs-on: ${{ matrix.runner }}
# no per-job outputs here; shared metadata comes from the `prepare` job
build:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
with:
@@ -112,162 +44,73 @@ jobs:
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Login to Docker Hub
uses: docker/login-action@v2
with:
registry: docker.io
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Increment Build Number
if: steps.check_actor.outputs.is_bot != 'true'
id: increment_build
run: |
python scripts/increment_build.py
BUILD=$(python -c "import version; print(version.__build__)")
echo "build=${BUILD}" >> $GITHUB_OUTPUT
- name: Extract metadata for Docker
- name: Commit Build Number Update
if: steps.check_actor.outputs.is_bot != 'true'
run: |
git add version.py
git commit -m "Increment build number to ${{ steps.increment_build.outputs.build }} [skip ci]"
git push
- name: Extract version info
id: version
run: |
VERSION=$(python -c "import version; print(version.__version__)")
BUILD=$(python -c "import version; print(version.__build__)")
echo "version=${VERSION}" >> $GITHUB_OUTPUT
echo "build=${BUILD}" >> $GITHUB_OUTPUT
echo "sha_short=${GITHUB_SHA::7}" >> $GITHUB_OUTPUT
- name: Set repository and image metadata
id: meta
uses: docker/metadata-action@v5
with:
images: |
ghcr.io/${{ needs.prepare.outputs.repo_owner }}/${{ needs.prepare.outputs.repo_name }}
docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${{ needs.prepare.outputs.repo_name }}
labels: |
org.opencontainers.image.title=${{ needs.prepare.outputs.repo_name }}
org.opencontainers.image.description=Your ultimate IPTV & stream Management companion.
org.opencontainers.image.url=https://github.com/${{ github.repository }}
org.opencontainers.image.source=https://github.com/${{ github.repository }}
org.opencontainers.image.version=${{ needs.prepare.outputs.version }}-${{ needs.prepare.outputs.timestamp }}
org.opencontainers.image.created=${{ needs.prepare.outputs.timestamp }}
org.opencontainers.image.revision=${{ github.sha }}
org.opencontainers.image.licenses=See repository
org.opencontainers.image.documentation=https://dispatcharr.github.io/Dispatcharr-Docs/
org.opencontainers.image.vendor=${{ needs.prepare.outputs.repo_owner }}
org.opencontainers.image.authors=${{ github.actor }}
maintainer=${{ github.actor }}
build_version=Dispatcharr version: ${{ needs.prepare.outputs.version }}-${{ needs.prepare.outputs.timestamp }}
run: |
# Get lowercase repository owner
REPO_OWNER=$(echo "${{ github.repository_owner }}" | tr '[:upper:]' '[:lower:]')
echo "repo_owner=${REPO_OWNER}" >> $GITHUB_OUTPUT
# Get repository name
REPO_NAME=$(echo "${{ github.repository }}" | cut -d '/' -f 2 | tr '[:upper:]' '[:lower:]')
echo "repo_name=${REPO_NAME}" >> $GITHUB_OUTPUT
# Determine branch name
if [[ "${{ github.ref }}" == "refs/heads/main" ]]; then
echo "branch_tag=latest" >> $GITHUB_OUTPUT
echo "is_main=true" >> $GITHUB_OUTPUT
elif [[ "${{ github.ref }}" == "refs/heads/dev" ]]; then
echo "branch_tag=dev" >> $GITHUB_OUTPUT
echo "is_main=false" >> $GITHUB_OUTPUT
else
# For other branches, use the branch name
BRANCH=$(echo "${{ github.ref }}" | sed 's/refs\/heads\///' | sed 's/[^a-zA-Z0-9]/-/g')
echo "branch_tag=${BRANCH}" >> $GITHUB_OUTPUT
echo "is_main=false" >> $GITHUB_OUTPUT
fi
# Determine if this is from a fork
if [[ "${{ github.event.pull_request.head.repo.fork }}" == "true" ]]; then
echo "is_fork=true" >> $GITHUB_OUTPUT
else
echo "is_fork=false" >> $GITHUB_OUTPUT
fi
- name: Build and push Docker image
uses: docker/build-push-action@v4
with:
context: .
push: ${{ github.event_name != 'pull_request' }}
# Build only the platform for this matrix job to avoid running amd64
# stages under qemu on an arm64 runner (and vice-versa). This makes
# the matrix runner's platform the one built by buildx.
platforms: linux/${{ matrix.platform }}
# push arch-specific tags from each matrix job (they will be combined
# into a multi-arch manifest in a follow-up job)
platforms: linux/amd64 # Fast build - amd64 only
tags: |
ghcr.io/${{ needs.prepare.outputs.repo_owner }}/${{ needs.prepare.outputs.repo_name }}:${{ needs.prepare.outputs.branch_tag }}-${{ matrix.platform }}
ghcr.io/${{ needs.prepare.outputs.repo_owner }}/${{ needs.prepare.outputs.repo_name }}:${{ needs.prepare.outputs.version }}-${{ needs.prepare.outputs.timestamp }}-${{ matrix.platform }}
docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${{ needs.prepare.outputs.repo_name }}:${{ needs.prepare.outputs.branch_tag }}-${{ matrix.platform }}
docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${{ needs.prepare.outputs.repo_name }}:${{ needs.prepare.outputs.version }}-${{ needs.prepare.outputs.timestamp }}-${{ matrix.platform }}
labels: ${{ steps.meta.outputs.labels }}
ghcr.io/${{ steps.meta.outputs.repo_owner }}/${{ steps.meta.outputs.repo_name }}:${{ steps.meta.outputs.branch_tag }}
ghcr.io/${{ steps.meta.outputs.repo_owner }}/${{ steps.meta.outputs.repo_name }}:${{ steps.version.outputs.version }}-${{ steps.version.outputs.build }}
ghcr.io/${{ steps.meta.outputs.repo_owner }}/${{ steps.meta.outputs.repo_name }}:${{ steps.version.outputs.sha_short }}
build-args: |
REPO_OWNER=${{ needs.prepare.outputs.repo_owner }}
REPO_NAME=${{ needs.prepare.outputs.repo_name }}
BASE_TAG=base
BRANCH=${{ github.ref_name }}
REPO_URL=https://github.com/${{ github.repository }}
TIMESTAMP=${{ needs.prepare.outputs.timestamp }}
file: ./docker/Dockerfile
create-manifest:
# wait for prepare and all matrix builds to finish
needs: [prepare, docker]
runs-on: ubuntu-24.04
if: ${{ github.event_name != 'pull_request' }}
steps:
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v2
- name: Login to GitHub Container Registry
uses: docker/login-action@v2
with:
registry: ghcr.io
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Login to Docker Hub
uses: docker/login-action@v2
with:
registry: docker.io
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Create multi-arch manifest tags
run: |
set -euo pipefail
OWNER=${{ needs.prepare.outputs.repo_owner }}
REPO=${{ needs.prepare.outputs.repo_name }}
BRANCH_TAG=${{ needs.prepare.outputs.branch_tag }}
VERSION=${{ needs.prepare.outputs.version }}
TIMESTAMP=${{ needs.prepare.outputs.timestamp }}
echo "Creating multi-arch manifest for ${OWNER}/${REPO}"
# branch tag (e.g. latest or dev)
docker buildx imagetools create \
--annotation "index:org.opencontainers.image.title=${{ needs.prepare.outputs.repo_name }}" \
--annotation "index:org.opencontainers.image.description=Your ultimate IPTV & stream Management companion." \
--annotation "index:org.opencontainers.image.url=https://github.com/${{ github.repository }}" \
--annotation "index:org.opencontainers.image.source=https://github.com/${{ github.repository }}" \
--annotation "index:org.opencontainers.image.version=${BRANCH_TAG}" \
--annotation "index:org.opencontainers.image.created=${TIMESTAMP}" \
--annotation "index:org.opencontainers.image.revision=${{ github.sha }}" \
--annotation "index:org.opencontainers.image.licenses=See repository" \
--annotation "index:org.opencontainers.image.documentation=https://dispatcharr.github.io/Dispatcharr-Docs/" \
--annotation "index:org.opencontainers.image.vendor=${OWNER}" \
--annotation "index:org.opencontainers.image.authors=${{ github.actor }}" \
--annotation "index:maintainer=${{ github.actor }}" \
--annotation "index:build_version=Dispatcharr version: ${VERSION}-${TIMESTAMP}" \
--tag ghcr.io/${OWNER}/${REPO}:${BRANCH_TAG} \
ghcr.io/${OWNER}/${REPO}:${BRANCH_TAG}-amd64 ghcr.io/${OWNER}/${REPO}:${BRANCH_TAG}-arm64
# version + timestamp tag
docker buildx imagetools create \
--annotation "index:org.opencontainers.image.title=${{ needs.prepare.outputs.repo_name }}" \
--annotation "index:org.opencontainers.image.description=Your ultimate IPTV & stream Management companion." \
--annotation "index:org.opencontainers.image.url=https://github.com/${{ github.repository }}" \
--annotation "index:org.opencontainers.image.source=https://github.com/${{ github.repository }}" \
--annotation "index:org.opencontainers.image.version=${VERSION}-${TIMESTAMP}" \
--annotation "index:org.opencontainers.image.created=${TIMESTAMP}" \
--annotation "index:org.opencontainers.image.revision=${{ github.sha }}" \
--annotation "index:org.opencontainers.image.licenses=See repository" \
--annotation "index:org.opencontainers.image.documentation=https://dispatcharr.github.io/Dispatcharr-Docs/" \
--annotation "index:org.opencontainers.image.vendor=${OWNER}" \
--annotation "index:org.opencontainers.image.authors=${{ github.actor }}" \
--annotation "index:maintainer=${{ github.actor }}" \
--annotation "index:build_version=Dispatcharr version: ${VERSION}-${TIMESTAMP}" \
--tag ghcr.io/${OWNER}/${REPO}:${VERSION}-${TIMESTAMP} \
ghcr.io/${OWNER}/${REPO}:${VERSION}-${TIMESTAMP}-amd64 ghcr.io/${OWNER}/${REPO}:${VERSION}-${TIMESTAMP}-arm64
# also create Docker Hub manifests using the same username
docker buildx imagetools create \
--annotation "index:org.opencontainers.image.title=${{ needs.prepare.outputs.repo_name }}" \
--annotation "index:org.opencontainers.image.description=Your ultimate IPTV & stream Management companion." \
--annotation "index:org.opencontainers.image.url=https://github.com/${{ github.repository }}" \
--annotation "index:org.opencontainers.image.source=https://github.com/${{ github.repository }}" \
--annotation "index:org.opencontainers.image.version=${BRANCH_TAG}" \
--annotation "index:org.opencontainers.image.created=${TIMESTAMP}" \
--annotation "index:org.opencontainers.image.revision=${{ github.sha }}" \
--annotation "index:org.opencontainers.image.licenses=See repository" \
--annotation "index:org.opencontainers.image.documentation=https://dispatcharr.github.io/Dispatcharr-Docs/" \
--annotation "index:org.opencontainers.image.vendor=${OWNER}" \
--annotation "index:org.opencontainers.image.authors=${{ github.actor }}" \
--annotation "index:maintainer=${{ github.actor }}" \
--annotation "index:build_version=Dispatcharr version: ${VERSION}-${TIMESTAMP}" \
--tag docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${REPO}:${BRANCH_TAG} \
docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${REPO}:${BRANCH_TAG}-amd64 docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${REPO}:${BRANCH_TAG}-arm64
docker buildx imagetools create \
--annotation "index:org.opencontainers.image.title=${{ needs.prepare.outputs.repo_name }}" \
--annotation "index:org.opencontainers.image.description=Your ultimate IPTV & stream Management companion." \
--annotation "index:org.opencontainers.image.url=https://github.com/${{ github.repository }}" \
--annotation "index:org.opencontainers.image.source=https://github.com/${{ github.repository }}" \
--annotation "index:org.opencontainers.image.version=${VERSION}-${TIMESTAMP}" \
--annotation "index:org.opencontainers.image.created=${TIMESTAMP}" \
--annotation "index:org.opencontainers.image.revision=${{ github.sha }}" \
--annotation "index:org.opencontainers.image.licenses=See repository" \
--annotation "index:org.opencontainers.image.documentation=https://dispatcharr.github.io/Dispatcharr-Docs/" \
--annotation "index:org.opencontainers.image.vendor=${OWNER}" \
--annotation "index:org.opencontainers.image.authors=${{ github.actor }}" \
--annotation "index:maintainer=${{ github.actor }}" \
--annotation "index:build_version=Dispatcharr version: ${VERSION}-${TIMESTAMP}" \
--tag docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${REPO}:${VERSION}-${TIMESTAMP} \
docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${REPO}:${VERSION}-${TIMESTAMP}-amd64 docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${REPO}:${VERSION}-${TIMESTAMP}-arm64


@@ -1,41 +0,0 @@
name: Frontend Tests
on:
  push:
    branches: [main, dev]
    paths:
      - 'frontend/**'
      - '.github/workflows/frontend-tests.yml'
  pull_request:
    branches: [main, dev]
    paths:
      - 'frontend/**'
      - '.github/workflows/frontend-tests.yml'
jobs:
  test:
    runs-on: ubuntu-latest
    defaults:
      run:
        working-directory: ./frontend
    steps:
      - name: Checkout code
        uses: actions/checkout@v4
      - name: Setup Node.js
        uses: actions/setup-node@v4
        with:
          node-version: '24'
          cache: 'npm'
          cache-dependency-path: './frontend/package-lock.json'
      - name: Install dependencies
        run: npm ci
      # - name: Run linter
      #   run: npm run lint
      - name: Run tests
        run: npm test


@@ -15,22 +15,16 @@ on:
# Add explicit permissions for the workflow
permissions:
contents: write # For managing releases and pushing tags
packages: write # For publishing to GitHub Container Registry
contents: write # For managing releases and pushing tags
packages: write # For publishing to GitHub Container Registry
jobs:
prepare:
runs-on: ubuntu-24.04
outputs:
new_version: ${{ steps.update_version.outputs.new_version }}
repo_owner: ${{ steps.meta.outputs.repo_owner }}
repo_name: ${{ steps.meta.outputs.repo_name }}
timestamp: ${{ steps.timestamp.outputs.timestamp }}
release:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
with:
fetch-depth: 0
token: ${{ secrets.GITHUB_TOKEN }}
- name: Configure Git
run: |
@@ -44,194 +38,55 @@ jobs:
NEW_VERSION=$(python -c "import version; print(f'{version.__version__}')")
echo "new_version=${NEW_VERSION}" >> $GITHUB_OUTPUT
- name: Update Changelog
run: |
python scripts/update_changelog.py ${{ steps.update_version.outputs.new_version }}
- name: Set repository metadata
id: meta
- name: Set lowercase repo owner
id: repo_owner
run: |
REPO_OWNER=$(echo "${{ github.repository_owner }}" | tr '[:upper:]' '[:lower:]')
echo "repo_owner=${REPO_OWNER}" >> $GITHUB_OUTPUT
echo "lowercase=${REPO_OWNER}" >> $GITHUB_OUTPUT
REPO_NAME=$(echo "${{ github.repository }}" | cut -d '/' -f 2 | tr '[:upper:]' '[:lower:]')
echo "repo_name=${REPO_NAME}" >> $GITHUB_OUTPUT
- name: Set up QEMU
uses: docker/setup-qemu-action@v2
- name: Generate timestamp for build
id: timestamp
run: |
TIMESTAMP=$(date -u +'%Y%m%d%H%M%S')
echo "timestamp=${TIMESTAMP}" >> $GITHUB_OUTPUT
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v2
- name: Login to GitHub Container Registry
uses: docker/login-action@v2
with:
registry: ghcr.io
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Commit and Tag
run: |
git add version.py CHANGELOG.md
git add version.py
git commit -m "Release v${{ steps.update_version.outputs.new_version }}"
git tag -a "v${{ steps.update_version.outputs.new_version }}" -m "Release v${{ steps.update_version.outputs.new_version }}"
git push origin main --tags
docker:
needs: [prepare]
strategy:
fail-fast: false
matrix:
platform: [amd64, arm64]
include:
- platform: amd64
runner: ubuntu-24.04
- platform: arm64
runner: ubuntu-24.04-arm
runs-on: ${{ matrix.runner }}
steps:
- uses: actions/checkout@v3
with:
fetch-depth: 0
token: ${{ secrets.GITHUB_TOKEN }}
ref: main
- name: Configure Git
run: |
git config user.name "GitHub Actions"
git config user.email "actions@github.com"
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v2
- name: Login to GitHub Container Registry
uses: docker/login-action@v2
with:
registry: ghcr.io
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Login to Docker Hub
uses: docker/login-action@v2
with:
registry: docker.io
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Extract metadata for Docker
id: meta
uses: docker/metadata-action@v5
with:
images: |
ghcr.io/${{ needs.prepare.outputs.repo_owner }}/${{ needs.prepare.outputs.repo_name }}
docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${{ needs.prepare.outputs.repo_name }}
labels: |
org.opencontainers.image.title=${{ needs.prepare.outputs.repo_name }}
org.opencontainers.image.description=Your ultimate IPTV & stream Management companion.
org.opencontainers.image.url=https://github.com/${{ github.repository }}
org.opencontainers.image.source=https://github.com/${{ github.repository }}
org.opencontainers.image.version=${{ needs.prepare.outputs.new_version }}
org.opencontainers.image.created=${{ needs.prepare.outputs.timestamp }}
org.opencontainers.image.revision=${{ github.sha }}
org.opencontainers.image.licenses=See repository
org.opencontainers.image.documentation=https://dispatcharr.github.io/Dispatcharr-Docs/
org.opencontainers.image.vendor=${{ needs.prepare.outputs.repo_owner }}
org.opencontainers.image.authors=${{ github.actor }}
maintainer=${{ github.actor }}
build_version=Dispatcharr version: ${{ needs.prepare.outputs.new_version }} Build date: ${{ needs.prepare.outputs.timestamp }}
- name: Build and push Docker image
- name: Build and Push Release Image
uses: docker/build-push-action@v4
with:
context: .
push: true
platforms: linux/${{ matrix.platform }}
platforms: linux/amd64,linux/arm64, #linux/arm/v7 # Multi-arch support for releases
tags: |
ghcr.io/${{ needs.prepare.outputs.repo_owner }}/${{ needs.prepare.outputs.repo_name }}:latest-${{ matrix.platform }}
ghcr.io/${{ needs.prepare.outputs.repo_owner }}/${{ needs.prepare.outputs.repo_name }}:${{ needs.prepare.outputs.new_version }}-${{ matrix.platform }}
docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${{ needs.prepare.outputs.repo_name }}:latest-${{ matrix.platform }}
docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${{ needs.prepare.outputs.repo_name }}:${{ needs.prepare.outputs.new_version }}-${{ matrix.platform }}
labels: ${{ steps.meta.outputs.labels }}
ghcr.io/${{ steps.repo_owner.outputs.lowercase }}/dispatcharr:latest
ghcr.io/${{ steps.repo_owner.outputs.lowercase }}/dispatcharr:${{ steps.update_version.outputs.new_version }}
ghcr.io/${{ steps.repo_owner.outputs.lowercase }}/dispatcharr:latest-amd64
ghcr.io/${{ steps.repo_owner.outputs.lowercase }}/dispatcharr:latest-arm64
ghcr.io/${{ steps.repo_owner.outputs.lowercase }}/dispatcharr:${{ steps.update_version.outputs.new_version }}-amd64
ghcr.io/${{ steps.repo_owner.outputs.lowercase }}/dispatcharr:${{ steps.update_version.outputs.new_version }}-arm64
build-args: |
REPO_OWNER=${{ needs.prepare.outputs.repo_owner }}
REPO_NAME=${{ needs.prepare.outputs.repo_name }}
BRANCH=${{ github.ref_name }}
REPO_URL=https://github.com/${{ github.repository }}
file: ./docker/Dockerfile
create-manifest:
needs: [prepare, docker]
runs-on: ubuntu-24.04
steps:
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v2
- name: Login to GitHub Container Registry
uses: docker/login-action@v2
with:
registry: ghcr.io
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Login to Docker Hub
uses: docker/login-action@v2
with:
registry: docker.io
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Create multi-arch manifest tags
run: |
set -euo pipefail
OWNER=${{ needs.prepare.outputs.repo_owner }}
REPO=${{ needs.prepare.outputs.repo_name }}
VERSION=${{ needs.prepare.outputs.new_version }}
TIMESTAMP=${{ needs.prepare.outputs.timestamp }}
echo "Creating multi-arch manifest for ${OWNER}/${REPO}"
# GitHub Container Registry manifests
# Create one manifest with both latest and version tags
docker buildx imagetools create \
--annotation "index:org.opencontainers.image.title=${{ needs.prepare.outputs.repo_name }}" \
--annotation "index:org.opencontainers.image.description=Your ultimate IPTV & stream Management companion." \
--annotation "index:org.opencontainers.image.url=https://github.com/${{ github.repository }}" \
--annotation "index:org.opencontainers.image.source=https://github.com/${{ github.repository }}" \
--annotation "index:org.opencontainers.image.version=${VERSION}" \
--annotation "index:org.opencontainers.image.created=${TIMESTAMP}" \
--annotation "index:org.opencontainers.image.revision=${{ github.sha }}" \
--annotation "index:org.opencontainers.image.licenses=See repository" \
--annotation "index:org.opencontainers.image.documentation=https://dispatcharr.github.io/Dispatcharr-Docs/" \
--annotation "index:org.opencontainers.image.vendor=${OWNER}" \
--annotation "index:org.opencontainers.image.authors=${{ github.actor }}" \
--annotation "index:maintainer=${{ github.actor }}" \
--annotation "index:build_version=Dispatcharr version: ${VERSION} Build date: ${TIMESTAMP}" \
--tag ghcr.io/${OWNER}/${REPO}:latest \
--tag ghcr.io/${OWNER}/${REPO}:${VERSION} \
ghcr.io/${OWNER}/${REPO}:${VERSION}-amd64 ghcr.io/${OWNER}/${REPO}:${VERSION}-arm64
# Docker Hub manifests
# Create one manifest with both latest and version tags
docker buildx imagetools create \
--annotation "index:org.opencontainers.image.title=${{ needs.prepare.outputs.repo_name }}" \
--annotation "index:org.opencontainers.image.description=Your ultimate IPTV & stream Management companion." \
--annotation "index:org.opencontainers.image.url=https://github.com/${{ github.repository }}" \
--annotation "index:org.opencontainers.image.source=https://github.com/${{ github.repository }}" \
--annotation "index:org.opencontainers.image.version=${VERSION}" \
--annotation "index:org.opencontainers.image.created=${TIMESTAMP}" \
--annotation "index:org.opencontainers.image.revision=${{ github.sha }}" \
--annotation "index:org.opencontainers.image.licenses=See repository" \
--annotation "index:org.opencontainers.image.documentation=https://dispatcharr.github.io/Dispatcharr-Docs/" \
--annotation "index:org.opencontainers.image.vendor=${OWNER}" \
--annotation "index:org.opencontainers.image.authors=${{ github.actor }}" \
--annotation "index:maintainer=${{ github.actor }}" \
--annotation "index:build_version=Dispatcharr version: ${VERSION} Build date: ${TIMESTAMP}" \
--tag docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${REPO}:latest \
--tag docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${REPO}:${VERSION} \
docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${REPO}:${VERSION}-amd64 docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${REPO}:${VERSION}-arm64
create-release:
needs: [prepare, create-manifest]
runs-on: ubuntu-24.04
steps:
- name: Create GitHub Release
uses: softprops/action-gh-release@v1
with:
tag_name: v${{ needs.prepare.outputs.new_version }}
name: Release v${{ needs.prepare.outputs.new_version }}
tag_name: v${{ steps.update_version.outputs.new_version }}
name: Release v${{ steps.update_version.outputs.new_version }}
draft: false
prerelease: false
token: ${{ secrets.GITHUB_TOKEN }}

.gitignore (vendored): 3 lines changed

@@ -18,5 +18,4 @@ dump.rdb
debugpy*
uwsgi.sock
package-lock.json
models
.idea
models

File diff suppressed because it is too large.


@@ -1,286 +0,0 @@
# Dispatcharr Plugins
This document explains how to build, install, and use Python plugins in Dispatcharr. It covers discovery, the plugin interface, settings, actions, how to access application APIs, and examples.
---
## Quick Start
1) Create a folder under `/app/data/plugins/my_plugin/` (host path `data/plugins/my_plugin/` in the repo).
2) Add a `plugin.py` file exporting a `Plugin` class:
```
# /app/data/plugins/my_plugin/plugin.py
class Plugin:
    name = "My Plugin"
    version = "0.1.0"
    description = "Does something useful"

    # Settings fields rendered by the UI and persisted by the backend
    fields = [
        {"id": "enabled", "label": "Enabled", "type": "boolean", "default": True},
        {"id": "limit", "label": "Item limit", "type": "number", "default": 5},
        {"id": "mode", "label": "Mode", "type": "select", "default": "safe",
         "options": [
             {"value": "safe", "label": "Safe"},
             {"value": "fast", "label": "Fast"},
         ]},
        {"id": "note", "label": "Note", "type": "string", "default": ""},
    ]

    # Actions appear as buttons. Clicking one calls run(action, params, context)
    actions = [
        {"id": "do_work", "label": "Do Work", "description": "Process items"},
    ]

    def run(self, action: str, params: dict, context: dict):
        settings = context.get("settings", {})
        logger = context.get("logger")
        if action == "do_work":
            limit = int(settings.get("limit", 5))
            mode = settings.get("mode", "safe")
            logger.info(f"My Plugin running with limit={limit}, mode={mode}")
            # Do a small amount of work here. Schedule Celery tasks for heavy work.
            return {"status": "ok", "processed": limit, "mode": mode}
        return {"status": "error", "message": f"Unknown action {action}"}
```
3) Open the Plugins page in the UI, click the refresh icon to reload discovery, then configure and run your plugin.
---
## Where Plugins Live
- Default directory: `/app/data/plugins` inside the container.
- Override with env var: `DISPATCHARR_PLUGINS_DIR`.
- Each plugin is a directory containing either:
- `plugin.py` exporting a `Plugin` class, or
- a Python package (`__init__.py`) exporting a `Plugin` class.
The directory name (lowercased, spaces as `_`) is used as the registry key and module import path (e.g. `my_plugin.plugin`).
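For example, a minimal layout (names illustrative):
```
/app/data/plugins/
└── my_plugin/        # registry key: "my_plugin"
    └── plugin.py     # imported as my_plugin.plugin
```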
---
## Discovery & Lifecycle
- Discovery runs at server startup and on-demand when:
- Fetching the plugins list from the UI
- Hitting `POST /api/plugins/plugins/reload/`
- The loader imports each plugin module and instantiates `Plugin()`.
- Metadata (name, version, description) and a per-plugin settings JSON are stored in the DB.
Backend code:
- Loader: `apps/plugins/loader.py`
- API Views: `apps/plugins/api_views.py`
- API URLs: `apps/plugins/api_urls.py`
- Model: `apps/plugins/models.py` (stores `enabled` flag and `settings` per plugin)
---
## Plugin Interface
Export a `Plugin` class. Supported attributes and behavior:
- `name` (str): Human-readable name.
- `version` (str): Semantic version string.
- `description` (str): Short description.
- `fields` (list): Settings schema used by the UI to render controls.
- `actions` (list): Available actions; the UI renders a Run button for each.
- `run(action, params, context)` (callable): Invoked when a user clicks an action.
### Settings Schema
Supported field `type`s:
- `boolean`
- `number`
- `string`
- `select` (requires `options`: `[{"value": ..., "label": ...}, ...]`)
Common field keys:
- `id` (str): Settings key.
- `label` (str): Label shown in the UI.
- `type` (str): One of above.
- `default` (any): Default value used until saved.
- `help_text` (str, optional): Shown under the control.
- `options` (list, for select): List of `{value, label}`.
The UI automatically renders settings and persists them. The backend stores settings in `PluginConfig.settings`.
Read settings in `run` via `context["settings"]`.
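A minimal sketch of reading those values inside `run`, reusing the `limit` and `mode` fields from the Quick Start schema:
```
def run(self, action, params, context):
    settings = context.get("settings", {})
    # Saved values arrive here; fall back to the field defaults until the user saves
    limit = int(settings.get("limit", 5))
    mode = settings.get("mode", "safe")
    return {"status": "ok", "limit": limit, "mode": mode}
```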
### Actions
Each action is a dict:
- `id` (str): Unique action id.
- `label` (str): Button label.
- `description` (str, optional): Helper text.
Clicking an action calls your plugin's `run(action, params, context)` and shows a notification with the result or error.
### Action Confirmation (Modal)
Developers can request a confirmation modal per action using the `confirm` key on the action. Options:
- Boolean: `"confirm": True` shows a default confirmation modal.
- Dict: `"confirm": {"required": True, "title": "...", "message": "..."}` customizes the modal title and message.
Example:
```
actions = [
    {
        "id": "danger_run",
        "label": "Do Something Risky",
        "description": "Runs a job that affects many records.",
        "confirm": {"required": True, "title": "Proceed?", "message": "This will modify many records."},
    }
]
```
---
## Accessing Dispatcharr APIs from Plugins
Plugins are server-side Python code running within the Django application. You can:
- Import models and run queries/updates:
```
from apps.m3u.models import M3UAccount
from apps.epg.models import EPGSource
from apps.channels.models import Channel
from core.models import CoreSettings
```
- Dispatch Celery tasks for heavy work (recommended):
```
from apps.m3u.tasks import refresh_m3u_accounts # apps/m3u/tasks.py
from apps.epg.tasks import refresh_all_epg_data # apps/epg/tasks.py
refresh_m3u_accounts.delay()
refresh_all_epg_data.delay()
```
- Send WebSocket updates:
```
from core.utils import send_websocket_update
send_websocket_update('updates', 'update', {"type": "plugin", "plugin": "my_plugin", "message": "Done"})
```
- Use transactions:
```
from django.db import transaction
with transaction.atomic():
    # bulk updates here
    ...
```
- Log via provided context or standard logging:
```
def run(self, action, params, context):
    logger = context.get("logger")  # already configured
    logger.info("running action %s", action)
```
Prefer Celery tasks (`.delay()`) to keep `run` fast and non-blocking.
---
## REST Endpoints (for UI and tooling)
- List plugins: `GET /api/plugins/plugins/`
- Response: `{ "plugins": [{ key, name, version, description, enabled, fields, settings, actions }, ...] }`
- Reload discovery: `POST /api/plugins/plugins/reload/`
- Import plugin: `POST /api/plugins/plugins/import/` with form-data file field `file`
- Update settings: `POST /api/plugins/plugins/<key>/settings/` with `{"settings": {...}}`
- Run action: `POST /api/plugins/plugins/<key>/run/` with `{"action": "id", "params": {...}}`
- Enable/disable: `POST /api/plugins/plugins/<key>/enabled/` with `{"enabled": true|false}`
Notes:
- When disabled, a plugin cannot run actions; backend returns HTTP 403.
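A hedged sketch of driving these endpoints from a script with the third-party `requests` library; the base URL, port, and bearer-token handling are assumptions to adapt to your deployment:
```
import requests

BASE = "http://localhost:9191"  # assumed Dispatcharr address; adjust for your setup
HEADERS = {"Authorization": "Bearer <access-token>"}  # however your deployment issues tokens

# List plugins with their settings and actions
plugins = requests.get(f"{BASE}/api/plugins/plugins/", headers=HEADERS).json()["plugins"]

# Update a plugin's settings
requests.post(f"{BASE}/api/plugins/plugins/my_plugin/settings/",
              json={"settings": {"limit": 10}}, headers=HEADERS)

# Run an action (expect HTTP 403 if the plugin is disabled)
resp = requests.post(f"{BASE}/api/plugins/plugins/my_plugin/run/",
                     json={"action": "do_work", "params": {}}, headers=HEADERS)
print(resp.json())
```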
---
## Importing Plugins
- In the UI, click the Import button on the Plugins page and upload a `.zip` containing a plugin folder.
- The archive should contain either `plugin.py` or a Python package (`__init__.py`).
- On success, the UI shows the plugin name/description and lets you enable it immediately (plugins are disabled by default).
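One way to build such an archive with the standard library, keeping the plugin folder as the top level (folder name illustrative):
```
import shutil

# Run from the directory that contains my_plugin/; creates my_plugin.zip
# with my_plugin/plugin.py inside, matching the documented layout.
shutil.make_archive("my_plugin", "zip", root_dir=".", base_dir="my_plugin")
```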
---
## Enabling / Disabling Plugins
- Each plugin has a persisted `enabled` flag (default: disabled) and `ever_enabled` flag in the DB (`apps/plugins/models.py`).
- New plugins are disabled by default and require an explicit enable.
- The first time a plugin is enabled, the UI shows a trust warning modal explaining that plugins can run arbitrary server-side code.
- The Plugins page shows a toggle in the card header. Turning it off dims the card and disables the Run button.
- Backend enforcement: Attempts to run an action for a disabled plugin return HTTP 403.
---
## Example: Refresh All Sources Plugin
Path: `data/plugins/refresh_all/plugin.py`
```
class Plugin:
    name = "Refresh All Sources"
    version = "1.0.0"
    description = "Force refresh all M3U accounts and EPG sources."

    fields = [
        {"id": "confirm", "label": "Require confirmation", "type": "boolean", "default": True,
         "help_text": "If enabled, the UI should ask before running."}
    ]

    actions = [
        {"id": "refresh_all", "label": "Refresh All M3Us and EPGs",
         "description": "Queues background refresh for all active M3U accounts and EPG sources."}
    ]

    def run(self, action: str, params: dict, context: dict):
        if action == "refresh_all":
            from apps.m3u.tasks import refresh_m3u_accounts
            from apps.epg.tasks import refresh_all_epg_data

            refresh_m3u_accounts.delay()
            refresh_all_epg_data.delay()
            return {"status": "queued", "message": "Refresh jobs queued"}
        return {"status": "error", "message": f"Unknown action: {action}"}
```
---
## Best Practices
- Keep `run` short and schedule heavy operations via Celery tasks.
- Validate and sanitize `params` received from the UI (see the sketch after this list).
- Use database transactions for bulk or related updates.
- Log actionable messages for troubleshooting.
- Only write files under `/data` or `/app/data` paths.
- Treat plugins as trusted code: they run with full app permissions.
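As a sketch of the `params` point above (the `count` parameter and its bounds are hypothetical):
```
def run(self, action, params, context):
    # Coerce and clamp a numeric param before trusting it
    try:
        count = int(params.get("count", 0))
    except (TypeError, ValueError):
        return {"status": "error", "message": "count must be an integer"}
    count = max(0, min(count, 100))  # keep within a sane range
    return {"status": "ok", "count": count}
```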
---
## Troubleshooting
- Plugin not listed: ensure the folder exists and contains `plugin.py` with a `Plugin` class.
- Import errors: the folder name is the import name; avoid spaces or exotic characters.
- No confirmation: include a boolean field with `id: "confirm"` and set it to true (or make true its default).
- HTTP 403 on run: the plugin is disabled; enable it from the toggle or via the `enabled/` endpoint.
---
## Contributing
- Keep dependencies minimal. Vendoring small helpers into the plugin folder is acceptable.
- Use the existing task and model APIs where possible; propose extensions if you need new capabilities.
---
## Internals Reference
- Loader: `apps/plugins/loader.py`
- API Views: `apps/plugins/api_views.py`
- API URLs: `apps/plugins/api_urls.py`
- Model: `apps/plugins/models.py`
- Frontend page: `frontend/src/pages/Plugins.jsx`
- Sidebar entry: `frontend/src/components/Sidebar.jsx`


@@ -22,7 +22,6 @@ Dispatcharr has officially entered **BETA**, bringing powerful new features and
📊 **Real-Time Stats Dashboard** — Live insights into stream health and client activity\
🧠 **EPG Auto-Match** — Match program data to channels automatically\
⚙️ **Streamlink + FFmpeg Support** — Flexible backend options for streaming and recording\
🎬 **VOD Management** — Full Video on Demand support with movies and TV series\
🧼 **UI & UX Enhancements** — Smoother, faster, more responsive interface\
🛁 **Output Compatibility** — HDHomeRun, M3U, and XMLTV EPG support for Plex, Jellyfin, and more
@@ -32,7 +31,6 @@ Dispatcharr has officially entered **BETA**, bringing powerful new features and
**Full IPTV Control** — Import, organize, proxy, and monitor IPTV streams on your own terms\
**Smart Playlist Handling** — M3U import, filtering, grouping, and failover support\
**VOD Content Management** — Organize movies and TV series with metadata and streaming\
**Reliable EPG Integration** — Match and manage TV guide data with ease\
**Clean & Responsive Interface** — Modern design that gets out of your way\
**Fully Self-Hosted** — Total control, zero reliance on third-party services
@@ -106,7 +104,7 @@ Here's how you can join the party:
## 📚 Roadmap & Documentation
- 📚 **Roadmap:** Coming soon!
- 📖 **Documentation:** [Dispatcharr Docs](https://dispatcharr.github.io/Dispatcharr-Docs/)
- 📖 **Wiki:** In progress — tutorials, API references, and advanced setup guides on the way!
---
@@ -135,4 +133,4 @@ Have a question? Want to suggest a feature? Just want to say hi?\
---
### 🚀 *Happy Streaming! The Dispatcharr Team*
### 🚀 *Happy Streaming! The Dispatcharr Team*


@@ -1,39 +1,41 @@
from django.urls import path, include
from rest_framework.routers import DefaultRouter
from .api_views import (
AuthViewSet,
UserViewSet,
GroupViewSet,
TokenObtainPairView,
TokenRefreshView,
list_permissions,
initialize_superuser,
AuthViewSet, UserViewSet, GroupViewSet,
list_permissions, initialize_superuser
)
from rest_framework_simplejwt import views as jwt_views
app_name = "accounts"
app_name = 'accounts'
# 🔹 Register ViewSets with a Router
router = DefaultRouter()
router.register(r"users", UserViewSet, basename="user")
router.register(r"groups", GroupViewSet, basename="group")
router.register(r'users', UserViewSet, basename='user')
router.register(r'groups', GroupViewSet, basename='group')
# 🔹 Custom Authentication Endpoints
auth_view = AuthViewSet.as_view({"post": "login"})
auth_view = AuthViewSet.as_view({
'post': 'login'
})
logout_view = AuthViewSet.as_view({"post": "logout"})
logout_view = AuthViewSet.as_view({
'post': 'logout'
})
# 🔹 Define API URL patterns
urlpatterns = [
# Authentication
path("auth/login/", auth_view, name="user-login"),
path("auth/logout/", logout_view, name="user-logout"),
path('auth/login/', auth_view, name='user-login'),
path('auth/logout/', logout_view, name='user-logout'),
# Superuser API
path("initialize-superuser/", initialize_superuser, name="initialize_superuser"),
path('initialize-superuser/', initialize_superuser, name='initialize_superuser'),
# Permissions API
path("permissions/", list_permissions, name="list-permissions"),
path("token/", TokenObtainPairView.as_view(), name="token_obtain_pair"),
path("token/refresh/", TokenRefreshView.as_view(), name="token_refresh"),
path('permissions/', list_permissions, name='list-permissions'),
path('token/', jwt_views.TokenObtainPairView.as_view(), name='token_obtain_pair'),
path('token/refresh/', jwt_views.TokenRefreshView.as_view(), name='token_refresh'),
]
# 🔹 Include ViewSet routes


@@ -2,110 +2,16 @@ from django.contrib.auth import authenticate, login, logout
from django.contrib.auth.models import Group, Permission
from django.http import JsonResponse, HttpResponse
from django.views.decorators.csrf import csrf_exempt
from rest_framework.decorators import api_view, permission_classes, action
from rest_framework.decorators import api_view, permission_classes
from rest_framework.permissions import IsAuthenticated, AllowAny
from rest_framework.response import Response
from rest_framework import viewsets, status
from rest_framework import viewsets
from drf_yasg.utils import swagger_auto_schema
from drf_yasg import openapi
import json
from .permissions import IsAdmin, Authenticated
from dispatcharr.utils import network_access_allowed
from .models import User
from .serializers import UserSerializer, GroupSerializer, PermissionSerializer
from rest_framework_simplejwt.views import TokenObtainPairView, TokenRefreshView
class TokenObtainPairView(TokenObtainPairView):
def post(self, request, *args, **kwargs):
# Custom logic here
if not network_access_allowed(request, "UI"):
# Log blocked login attempt due to network restrictions
from core.utils import log_system_event
username = request.data.get("username", 'unknown')
client_ip = request.META.get('REMOTE_ADDR', 'unknown')
user_agent = request.META.get('HTTP_USER_AGENT', 'unknown')
log_system_event(
event_type='login_failed',
user=username,
client_ip=client_ip,
user_agent=user_agent,
reason='Network access denied',
)
return Response({"error": "Forbidden"}, status=status.HTTP_403_FORBIDDEN)
# Get the response from the parent class first
username = request.data.get("username")
# Log login attempt
from core.utils import log_system_event
client_ip = request.META.get('REMOTE_ADDR', 'unknown')
user_agent = request.META.get('HTTP_USER_AGENT', 'unknown')
try:
response = super().post(request, *args, **kwargs)
# If login was successful, update last_login and log success
if response.status_code == 200:
if username:
from django.utils import timezone
try:
user = User.objects.get(username=username)
user.last_login = timezone.now()
user.save(update_fields=['last_login'])
# Log successful login
log_system_event(
event_type='login_success',
user=username,
client_ip=client_ip,
user_agent=user_agent,
)
except User.DoesNotExist:
pass # User doesn't exist, but login somehow succeeded
else:
# Log failed login attempt
log_system_event(
event_type='login_failed',
user=username or 'unknown',
client_ip=client_ip,
user_agent=user_agent,
reason='Invalid credentials',
)
return response
except Exception as e:
# If parent class raises an exception (e.g., validation error), log failed attempt
log_system_event(
event_type='login_failed',
user=username or 'unknown',
client_ip=client_ip,
user_agent=user_agent,
reason=f'Authentication error: {str(e)[:100]}',
)
raise # Re-raise the exception to maintain normal error flow
class TokenRefreshView(TokenRefreshView):
def post(self, request, *args, **kwargs):
# Custom logic here
if not network_access_allowed(request, "UI"):
# Log blocked token refresh attempt due to network restrictions
from core.utils import log_system_event
client_ip = request.META.get('REMOTE_ADDR', 'unknown')
user_agent = request.META.get('HTTP_USER_AGENT', 'unknown')
log_system_event(
event_type='login_failed',
user='token_refresh',
client_ip=client_ip,
user_agent=user_agent,
reason='Network access denied (token refresh)',
)
return Response({"error": "Unauthorized"}, status=status.HTTP_403_FORBIDDEN)
return super().post(request, *args, **kwargs)
@csrf_exempt # In production, consider CSRF protection strategies or ensure this endpoint is only accessible when no superuser exists.
def initialize_superuser(request):
@@ -120,114 +26,56 @@ def initialize_superuser(request):
password = data.get("password")
email = data.get("email", "")
if not username or not password:
return JsonResponse(
{"error": "Username and password are required."}, status=400
)
return JsonResponse({"error": "Username and password are required."}, status=400)
# Create the superuser
User.objects.create_superuser(
username=username, password=password, email=email, user_level=10
)
User.objects.create_superuser(username=username, password=password, email=email)
return JsonResponse({"superuser_exists": True})
except Exception as e:
return JsonResponse({"error": str(e)}, status=500)
# For GET requests, indicate no superuser exists
return JsonResponse({"superuser_exists": False})
# 🔹 1) Authentication APIs
class AuthViewSet(viewsets.ViewSet):
"""Handles user login and logout"""
def get_permissions(self):
"""
Login doesn't require auth, but logout does
"""
if self.action == 'logout':
from rest_framework.permissions import IsAuthenticated
return [IsAuthenticated()]
return []
@swagger_auto_schema(
operation_description="Authenticate and log in a user",
request_body=openapi.Schema(
type=openapi.TYPE_OBJECT,
required=["username", "password"],
required=['username', 'password'],
properties={
"username": openapi.Schema(type=openapi.TYPE_STRING),
"password": openapi.Schema(
type=openapi.TYPE_STRING, format=openapi.FORMAT_PASSWORD
),
'username': openapi.Schema(type=openapi.TYPE_STRING),
'password': openapi.Schema(type=openapi.TYPE_STRING, format=openapi.FORMAT_PASSWORD)
},
),
responses={200: "Login successful", 400: "Invalid credentials"},
)
def login(self, request):
"""Logs in a user and returns user details"""
username = request.data.get("username")
password = request.data.get("password")
username = request.data.get('username')
password = request.data.get('password')
user = authenticate(request, username=username, password=password)
# Get client info for logging
from core.utils import log_system_event
client_ip = request.META.get('REMOTE_ADDR', 'unknown')
user_agent = request.META.get('HTTP_USER_AGENT', 'unknown')
if user:
login(request, user)
# Update last_login timestamp
from django.utils import timezone
user.last_login = timezone.now()
user.save(update_fields=['last_login'])
# Log successful login
log_system_event(
event_type='login_success',
user=username,
client_ip=client_ip,
user_agent=user_agent,
)
return Response(
{
"message": "Login successful",
"user": {
"id": user.id,
"username": user.username,
"email": user.email,
"groups": list(user.groups.values_list("name", flat=True)),
},
return Response({
"message": "Login successful",
"user": {
"id": user.id,
"username": user.username,
"email": user.email,
"groups": list(user.groups.values_list('name', flat=True))
}
)
# Log failed login attempt
log_system_event(
event_type='login_failed',
user=username or 'unknown',
client_ip=client_ip,
user_agent=user_agent,
reason='Invalid credentials',
)
})
return Response({"error": "Invalid credentials"}, status=400)
@swagger_auto_schema(
operation_description="Log out the current user",
responses={200: "Logout successful"},
responses={200: "Logout successful"}
)
def logout(self, request):
"""Logs out the authenticated user"""
# Log logout event before actually logging out
from core.utils import log_system_event
username = request.user.username if request.user and request.user.is_authenticated else 'unknown'
client_ip = request.META.get('REMOTE_ADDR', 'unknown')
user_agent = request.META.get('HTTP_USER_AGENT', 'unknown')
log_system_event(
event_type='logout',
user=username,
client_ip=client_ip,
user_agent=user_agent,
)
logout(request)
return Response({"message": "Logout successful"})
@ -235,19 +83,13 @@ class AuthViewSet(viewsets.ViewSet):
# 🔹 2) User Management APIs
class UserViewSet(viewsets.ModelViewSet):
"""Handles CRUD operations for Users"""
queryset = User.objects.all().prefetch_related('channel_profiles')
queryset = User.objects.all()
serializer_class = UserSerializer
def get_permissions(self):
if self.action == "me":
return [Authenticated()]
return [IsAdmin()]
permission_classes = [IsAuthenticated]
@swagger_auto_schema(
operation_description="Retrieve a list of users",
responses={200: UserSerializer(many=True)},
responses={200: UserSerializer(many=True)}
)
def list(self, request, *args, **kwargs):
return super().list(request, *args, **kwargs)
@ -268,28 +110,17 @@ class UserViewSet(viewsets.ModelViewSet):
def destroy(self, request, *args, **kwargs):
return super().destroy(request, *args, **kwargs)
@swagger_auto_schema(
method="get",
operation_description="Get active user information",
)
@action(detail=False, methods=["get"], url_path="me")
def me(self, request):
user = request.user
serializer = UserSerializer(user)
return Response(serializer.data)
# 🔹 3) Group Management APIs
class GroupViewSet(viewsets.ModelViewSet):
"""Handles CRUD operations for Groups"""
queryset = Group.objects.all()
serializer_class = GroupSerializer
permission_classes = [Authenticated]
permission_classes = [IsAuthenticated]
@swagger_auto_schema(
operation_description="Retrieve a list of groups",
responses={200: GroupSerializer(many=True)},
responses={200: GroupSerializer(many=True)}
)
def list(self, request, *args, **kwargs):
return super().list(request, *args, **kwargs)
@ -313,12 +144,12 @@ class GroupViewSet(viewsets.ModelViewSet):
# 🔹 4) Permissions List API
@swagger_auto_schema(
method="get",
method='get',
operation_description="Retrieve a list of all permissions",
responses={200: PermissionSerializer(many=True)},
responses={200: PermissionSerializer(many=True)}
)
@api_view(["GET"])
@permission_classes([Authenticated])
@api_view(['GET'])
@permission_classes([IsAuthenticated])
def list_permissions(request):
"""Returns a list of all available permissions"""
permissions = Permission.objects.all()


@ -1,7 +1,6 @@
from django.apps import AppConfig
class AccountsConfig(AppConfig):
default_auto_field = "django.db.models.BigAutoField"
name = "apps.accounts"
default_auto_field = 'django.db.models.BigAutoField'
name = 'apps.accounts'
verbose_name = "Accounts & Authentication"


@ -1,43 +0,0 @@
# Generated by Django 5.1.6 on 2025-05-18 15:47
from django.db import migrations, models
def set_user_level_to_10(apps, schema_editor):
User = apps.get_model("accounts", "User")
User.objects.update(user_level=10)
class Migration(migrations.Migration):
dependencies = [
("accounts", "0001_initial"),
("dispatcharr_channels", "0021_channel_user_level"),
]
operations = [
migrations.RemoveField(
model_name="user",
name="channel_groups",
),
migrations.AddField(
model_name="user",
name="channel_profiles",
field=models.ManyToManyField(
blank=True,
related_name="users",
to="dispatcharr_channels.channelprofile",
),
),
migrations.AddField(
model_name="user",
name="user_level",
field=models.IntegerField(default=0),
),
migrations.AddField(
model_name="user",
name="custom_properties",
field=models.TextField(blank=True, null=True),
),
migrations.RunPython(set_user_level_to_10),
]


@ -1,18 +0,0 @@
# Generated by Django 5.2.4 on 2025-09-02 14:30
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('accounts', '0002_remove_user_channel_groups_user_channel_profiles_and_more'),
]
operations = [
migrations.AlterField(
model_name='user',
name='custom_properties',
field=models.JSONField(blank=True, default=dict, null=True),
),
]


@ -2,26 +2,17 @@
from django.db import models
from django.contrib.auth.models import AbstractUser, Permission
class User(AbstractUser):
"""
Custom user model for Dispatcharr.
Inherits from Django's AbstractUser to add additional fields if needed.
"""
class UserLevel(models.IntegerChoices):
STREAMER = 0, "Streamer"
STANDARD = 1, "Standard User"
ADMIN = 10, "Admin"
avatar_config = models.JSONField(default=dict, blank=True, null=True)
channel_profiles = models.ManyToManyField(
"dispatcharr_channels.ChannelProfile",
channel_groups = models.ManyToManyField(
'dispatcharr_channels.ChannelGroup', # Updated reference to renamed model
blank=True,
related_name="users",
related_name="users"
)
user_level = models.IntegerField(default=UserLevel.STREAMER)
custom_properties = models.JSONField(default=dict, blank=True, null=True)
def __str__(self):
return self.username


@ -1,56 +0,0 @@
from rest_framework.permissions import IsAuthenticated
from .models import User
from dispatcharr.utils import network_access_allowed
class Authenticated(IsAuthenticated):
def has_permission(self, request, view):
is_authenticated = super().has_permission(request, view)
network_allowed = network_access_allowed(request, "UI")
return is_authenticated and network_allowed
class IsStandardUser(Authenticated):
def has_permission(self, request, view):
if not super().has_permission(request, view):
return False
return request.user and request.user.user_level >= User.UserLevel.STANDARD
class IsAdmin(Authenticated):
def has_permission(self, request, view):
if not super().has_permission(request, view):
return False
return request.user.user_level >= 10
class IsOwnerOfObject(Authenticated):
def has_object_permission(self, request, view, obj):
if not super().has_permission(request, view):
return False
is_admin = IsAdmin().has_permission(request, view)
is_owner = request.user in obj.users.all()
return is_admin or is_owner
permission_classes_by_action = {
"list": [IsStandardUser],
"create": [IsAdmin],
"retrieve": [IsStandardUser],
"update": [IsAdmin],
"partial_update": [IsAdmin],
"destroy": [IsAdmin],
}
permission_classes_by_method = {
"GET": [IsStandardUser],
"POST": [IsAdmin],
"PATCH": [IsAdmin],
"PUT": [IsAdmin],
"DELETE": [IsAdmin],
}


@ -1,14 +1,13 @@
from rest_framework import serializers
from django.contrib.auth.models import Group, Permission
from .models import User
from apps.channels.models import ChannelProfile
# 🔹 Fix for Permission serialization
class PermissionSerializer(serializers.ModelSerializer):
class Meta:
model = Permission
fields = ["id", "name", "codename"]
fields = ['id', 'name', 'codename']
# 🔹 Fix for Group serialization
@ -19,61 +18,15 @@ class GroupSerializer(serializers.ModelSerializer):
class Meta:
model = Group
fields = ["id", "name", "permissions"]
fields = ['id', 'name', 'permissions']
# 🔹 Fix for User serialization
class UserSerializer(serializers.ModelSerializer):
password = serializers.CharField(write_only=True)
channel_profiles = serializers.PrimaryKeyRelatedField(
queryset=ChannelProfile.objects.all(), many=True, required=False
)
groups = serializers.SlugRelatedField(
many=True, queryset=Group.objects.all(), slug_field="name"
) # ✅ Fix ManyToMany `_meta` error
class Meta:
model = User
fields = [
"id",
"username",
"email",
"user_level",
"password",
"channel_profiles",
"custom_properties",
"avatar_config",
"is_active",
"is_staff",
"is_superuser",
"last_login",
"date_joined",
"first_name",
"last_name",
]
def create(self, validated_data):
channel_profiles = validated_data.pop("channel_profiles", [])
user = User(**validated_data)
user.set_password(validated_data["password"])
user.is_active = True
user.save()
user.channel_profiles.set(channel_profiles)
return user
def update(self, instance, validated_data):
password = validated_data.pop("password", None)
channel_profiles = validated_data.pop("channel_profiles", None)
for attr, value in validated_data.items():
setattr(instance, attr, value)
if password:
instance.set_password(password)
instance.save()
if channel_profiles is not None:
instance.channel_profiles.set(channel_profiles)
return instance
fields = ['id', 'username', 'email', 'groups']
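For reference, a create request against the newer serializer could carry a body like the one below; every value is illustrative:

payload = {
    "username": "alice",
    "password": "s3cret-example",   # write-only; hashed via set_password()
    "email": "alice@example.com",
    "user_level": 1,                # 0=Streamer, 1=Standard, 10=Admin
    "channel_profiles": [1, 2],     # ChannelProfile primary keys
    "groups": ["editors"],          # group names (SlugRelatedField)
}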


@ -5,7 +5,6 @@ from django.db.models.signals import post_save
from django.dispatch import receiver
from .models import User
@receiver(post_save, sender=User)
def handle_new_user(sender, instance, created, **kwargs):
if created:


@ -1,10 +1,11 @@
from django.urls import path, include, re_path
from django.urls import path, include
from drf_yasg.views import get_schema_view
from drf_yasg import openapi
from rest_framework.permissions import AllowAny
app_name = 'api'
# Configure Swagger Schema
schema_view = get_schema_view(
openapi.Info(
title="Dispatcharr API",
@ -25,9 +26,6 @@ urlpatterns = [
path('hdhr/', include(('apps.hdhr.api_urls', 'hdhr'), namespace='hdhr')),
path('m3u/', include(('apps.m3u.api_urls', 'm3u'), namespace='m3u')),
path('core/', include(('core.api_urls', 'core'), namespace='core')),
path('plugins/', include(('apps.plugins.api_urls', 'plugins'), namespace='plugins')),
path('vod/', include(('apps.vod.api_urls', 'vod'), namespace='vod')),
path('backups/', include(('apps.backups.api_urls', 'backups'), namespace='backups')),
# path('output/', include(('apps.output.api_urls', 'output'), namespace='output')),
#path('player/', include(('apps.player.api_urls', 'player'), namespace='player')),
#path('settings/', include(('apps.settings.api_urls', 'settings'), namespace='settings')),
@ -36,7 +34,7 @@ urlpatterns = [
# Swagger Documentation api_urls
re_path(r'^swagger/?$', schema_view.with_ui('swagger', cache_timeout=0), name='schema-swagger-ui'),
path('swagger/', schema_view.with_ui('swagger', cache_timeout=0), name='schema-swagger-ui'),
path('redoc/', schema_view.with_ui('redoc', cache_timeout=0), name='schema-redoc'),
path('swagger.json', schema_view.without_ui(cache_timeout=0), name='schema-json'),
]


@ -1,18 +0,0 @@
from django.urls import path
from . import api_views
app_name = "backups"
urlpatterns = [
path("", api_views.list_backups, name="backup-list"),
path("create/", api_views.create_backup, name="backup-create"),
path("upload/", api_views.upload_backup, name="backup-upload"),
path("schedule/", api_views.get_schedule, name="backup-schedule-get"),
path("schedule/update/", api_views.update_schedule, name="backup-schedule-update"),
path("status/<str:task_id>/", api_views.backup_status, name="backup-status"),
path("<str:filename>/download-token/", api_views.get_download_token, name="backup-download-token"),
path("<str:filename>/download/", api_views.download_backup, name="backup-download"),
path("<str:filename>/delete/", api_views.delete_backup, name="backup-delete"),
path("<str:filename>/restore/", api_views.restore_backup, name="backup-restore"),
]


@ -1,364 +0,0 @@
import hashlib
import hmac
import logging
import os
from pathlib import Path
from celery.result import AsyncResult
from django.conf import settings
from django.http import HttpResponse, StreamingHttpResponse, Http404
from rest_framework import status
from rest_framework.decorators import api_view, permission_classes, parser_classes
from rest_framework.permissions import IsAdminUser, AllowAny
from rest_framework.parsers import MultiPartParser, FormParser
from rest_framework.response import Response
from . import services
from .tasks import create_backup_task, restore_backup_task
from .scheduler import get_schedule_settings, update_schedule_settings
logger = logging.getLogger(__name__)
def _generate_task_token(task_id: str) -> str:
"""Generate a signed token for task status access without auth."""
secret = settings.SECRET_KEY.encode()
return hmac.new(secret, task_id.encode(), hashlib.sha256).hexdigest()[:32]
def _verify_task_token(task_id: str, token: str) -> bool:
"""Verify a task token is valid."""
expected = _generate_task_token(task_id)
return hmac.compare_digest(expected, token)
@api_view(["GET"])
@permission_classes([IsAdminUser])
def list_backups(request):
"""List all available backup files."""
try:
backups = services.list_backups()
return Response(backups, status=status.HTTP_200_OK)
except Exception as e:
return Response(
{"detail": f"Failed to list backups: {str(e)}"},
status=status.HTTP_500_INTERNAL_SERVER_ERROR,
)
@api_view(["POST"])
@permission_classes([IsAdminUser])
def create_backup(request):
"""Create a new backup (async via Celery)."""
try:
task = create_backup_task.delay()
return Response(
{
"detail": "Backup started",
"task_id": task.id,
"task_token": _generate_task_token(task.id),
},
status=status.HTTP_202_ACCEPTED,
)
except Exception as e:
return Response(
{"detail": f"Failed to start backup: {str(e)}"},
status=status.HTTP_500_INTERNAL_SERVER_ERROR,
)
@api_view(["GET"])
@permission_classes([AllowAny])
def backup_status(request, task_id):
"""Check the status of a backup/restore task.
Requires either:
- Valid admin authentication, OR
- Valid task_token query parameter
"""
# Check for token-based auth (for restore when session is invalidated)
token = request.query_params.get("token")
if token:
if not _verify_task_token(task_id, token):
return Response(
{"detail": "Invalid task token"},
status=status.HTTP_403_FORBIDDEN,
)
else:
# Fall back to admin auth check
if not request.user.is_authenticated or not request.user.is_staff:
return Response(
{"detail": "Authentication required"},
status=status.HTTP_401_UNAUTHORIZED,
)
try:
result = AsyncResult(task_id)
if result.ready():
task_result = result.get()
if task_result.get("status") == "completed":
return Response({
"state": "completed",
"result": task_result,
})
else:
return Response({
"state": "failed",
"error": task_result.get("error", "Unknown error"),
})
elif result.failed():
return Response({
"state": "failed",
"error": str(result.result),
})
else:
return Response({
"state": result.state.lower(),
})
except Exception as e:
return Response(
{"detail": f"Failed to get task status: {str(e)}"},
status=status.HTTP_500_INTERNAL_SERVER_ERROR,
)
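A minimal polling sketch for this endpoint, assuming the backup routes are mounted under /api/backups/ and using the task_id/task_token pair returned when a backup or restore is started (host and port are illustrative):

import time
import requests  # any HTTP client works; requests is assumed here

BASE = "http://localhost:9191/api/backups"

def wait_for_task(task_id: str, token: str) -> dict:
    """Poll the status endpoint until the task completes or fails."""
    while True:
        resp = requests.get(f"{BASE}/status/{task_id}/", params={"token": token})
        resp.raise_for_status()
        data = resp.json()
        if data.get("state") in ("completed", "failed"):
            return data
        time.sleep(2)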
@api_view(["GET"])
@permission_classes([IsAdminUser])
def get_download_token(request, filename):
"""Get a signed token for downloading a backup file."""
try:
# Security: prevent path traversal
if ".." in filename or "/" in filename or "\\" in filename:
raise Http404("Invalid filename")
backup_dir = services.get_backup_dir()
backup_file = backup_dir / filename
if not backup_file.exists():
raise Http404("Backup file not found")
token = _generate_task_token(filename)
return Response({"token": token})
except Http404:
raise
except Exception as e:
return Response(
{"detail": f"Failed to generate token: {str(e)}"},
status=status.HTTP_500_INTERNAL_SERVER_ERROR,
)
@api_view(["GET"])
@permission_classes([AllowAny])
def download_backup(request, filename):
"""Download a backup file.
Requires either:
- Valid admin authentication, OR
- Valid download_token query parameter
"""
# Check for token-based auth (avoids CORS preflight issues)
token = request.query_params.get("token")
if token:
if not _verify_task_token(filename, token):
return Response(
{"detail": "Invalid download token"},
status=status.HTTP_403_FORBIDDEN,
)
else:
# Fall back to admin auth check
if not request.user.is_authenticated or not request.user.is_staff:
return Response(
{"detail": "Authentication required"},
status=status.HTTP_401_UNAUTHORIZED,
)
try:
# Security: prevent path traversal by checking for suspicious characters
if ".." in filename or "/" in filename or "\\" in filename:
raise Http404("Invalid filename")
backup_dir = services.get_backup_dir()
backup_file = (backup_dir / filename).resolve()
# Security: ensure the resolved path is still within backup_dir
if not str(backup_file).startswith(str(backup_dir.resolve())):
raise Http404("Invalid filename")
if not backup_file.exists() or not backup_file.is_file():
raise Http404("Backup file not found")
file_size = backup_file.stat().st_size
# Use X-Accel-Redirect for nginx (AIO container) - nginx serves file directly
# Fall back to streaming for non-nginx deployments
use_nginx_accel = os.environ.get("USE_NGINX_ACCEL", "").lower() == "true"
logger.info(f"[DOWNLOAD] File: {filename}, Size: {file_size}, USE_NGINX_ACCEL: {use_nginx_accel}")
if use_nginx_accel:
# X-Accel-Redirect: Django returns immediately, nginx serves file
logger.info(f"[DOWNLOAD] Using X-Accel-Redirect: /protected-backups/{filename}")
response = HttpResponse()
response["X-Accel-Redirect"] = f"/protected-backups/{filename}"
response["Content-Type"] = "application/zip"
response["Content-Length"] = file_size
response["Content-Disposition"] = f'attachment; filename="{filename}"'
return response
else:
# Streaming fallback for non-nginx deployments
logger.info(f"[DOWNLOAD] Using streaming fallback (no nginx)")
def file_iterator(file_path, chunk_size=2 * 1024 * 1024):
with open(file_path, "rb") as f:
while chunk := f.read(chunk_size):
yield chunk
response = StreamingHttpResponse(
file_iterator(backup_file),
content_type="application/zip",
)
response["Content-Length"] = file_size
response["Content-Disposition"] = f'attachment; filename="{filename}"'
return response
except Http404:
raise
except Exception as e:
return Response(
{"detail": f"Download failed: {str(e)}"},
status=status.HTTP_500_INTERNAL_SERVER_ERROR,
)
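The X-Accel-Redirect branch only works when nginx exposes a matching internal location; the block below is a minimal sketch of such a deployment, and the paths are assumptions rather than anything shipped in this repository:

location /protected-backups/ {
    internal;              # reachable only via X-Accel-Redirect, never directly
    alias /data/backups/;  # must point at the same directory as BACKUP_ROOT
}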
@api_view(["DELETE"])
@permission_classes([IsAdminUser])
def delete_backup(request, filename):
"""Delete a backup file."""
try:
# Security: prevent path traversal
if ".." in filename or "/" in filename or "\\" in filename:
raise Http404("Invalid filename")
services.delete_backup(filename)
return Response(
{"detail": "Backup deleted successfully"},
status=status.HTTP_204_NO_CONTENT,
)
except FileNotFoundError:
raise Http404("Backup file not found")
except Exception as e:
return Response(
{"detail": f"Delete failed: {str(e)}"},
status=status.HTTP_500_INTERNAL_SERVER_ERROR,
)
@api_view(["POST"])
@permission_classes([IsAdminUser])
@parser_classes([MultiPartParser, FormParser])
def upload_backup(request):
"""Upload a backup file for restoration."""
uploaded = request.FILES.get("file")
if not uploaded:
return Response(
{"detail": "No file uploaded"},
status=status.HTTP_400_BAD_REQUEST,
)
try:
backup_dir = services.get_backup_dir()
filename = uploaded.name or "uploaded-backup.zip"
# Ensure unique filename
backup_file = backup_dir / filename
counter = 1
while backup_file.exists():
name_parts = filename.rsplit(".", 1)
if len(name_parts) == 2:
backup_file = backup_dir / f"{name_parts[0]}-{counter}.{name_parts[1]}"
else:
backup_file = backup_dir / f"{filename}-{counter}"
counter += 1
# Save uploaded file
with backup_file.open("wb") as f:
for chunk in uploaded.chunks():
f.write(chunk)
return Response(
{
"detail": "Backup uploaded successfully",
"filename": backup_file.name,
},
status=status.HTTP_201_CREATED,
)
except Exception as e:
return Response(
{"detail": f"Upload failed: {str(e)}"},
status=status.HTTP_500_INTERNAL_SERVER_ERROR,
)
@api_view(["POST"])
@permission_classes([IsAdminUser])
def restore_backup(request, filename):
"""Restore from a backup file (async via Celery). WARNING: This will flush the database!"""
try:
# Security: prevent path traversal
if ".." in filename or "/" in filename or "\\" in filename:
raise Http404("Invalid filename")
backup_dir = services.get_backup_dir()
backup_file = backup_dir / filename
if not backup_file.exists():
raise Http404("Backup file not found")
task = restore_backup_task.delay(filename)
return Response(
{
"detail": "Restore started",
"task_id": task.id,
"task_token": _generate_task_token(task.id),
},
status=status.HTTP_202_ACCEPTED,
)
except Http404:
raise
except Exception as e:
return Response(
{"detail": f"Failed to start restore: {str(e)}"},
status=status.HTTP_500_INTERNAL_SERVER_ERROR,
)
@api_view(["GET"])
@permission_classes([IsAdminUser])
def get_schedule(request):
"""Get backup schedule settings."""
try:
schedule = get_schedule_settings()  # avoid shadowing django.conf.settings
return Response(schedule)
except Exception as e:
return Response(
{"detail": f"Failed to get schedule: {str(e)}"},
status=status.HTTP_500_INTERNAL_SERVER_ERROR,
)
@api_view(["PUT"])
@permission_classes([IsAdminUser])
def update_schedule(request):
"""Update backup schedule settings."""
try:
schedule = update_schedule_settings(request.data)  # avoid shadowing django.conf.settings
return Response(schedule)
except ValueError as e:
return Response(
{"detail": str(e)},
status=status.HTTP_400_BAD_REQUEST,
)
except Exception as e:
return Response(
{"detail": f"Failed to update schedule: {str(e)}"},
status=status.HTTP_500_INTERNAL_SERVER_ERROR,
)
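As a usage sketch, a weekly schedule at 03:30 that keeps the five most recent backups could be submitted to this PUT endpoint with a body like the following (values illustrative):

payload = {
    "enabled": True,
    "frequency": "weekly",
    "time": "03:30",
    "day_of_week": 1,       # 0-6, Sunday-Saturday
    "retention_count": 5,
    # "cron_expression": "30 3 * * 1",  # advanced mode; overrides the fields above
}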


@ -1,7 +0,0 @@
from django.apps import AppConfig
class BackupsConfig(AppConfig):
default_auto_field = "django.db.models.BigAutoField"
name = "apps.backups"
verbose_name = "Backups"


@ -1,202 +0,0 @@
import json
import logging
from django_celery_beat.models import PeriodicTask, CrontabSchedule
from core.models import CoreSettings
logger = logging.getLogger(__name__)
BACKUP_SCHEDULE_TASK_NAME = "backup-scheduled-task"
DEFAULTS = {
"schedule_enabled": True,
"schedule_frequency": "daily",
"schedule_time": "03:00",
"schedule_day_of_week": 0, # Sunday
"retention_count": 3,
"schedule_cron_expression": "",
}
def _get_backup_settings():
"""Get all backup settings from CoreSettings grouped JSON."""
try:
settings_obj = CoreSettings.objects.get(key="backup_settings")
return settings_obj.value if isinstance(settings_obj.value, dict) else DEFAULTS.copy()
except CoreSettings.DoesNotExist:
return DEFAULTS.copy()
def _update_backup_settings(updates: dict) -> None:
"""Update backup settings in the grouped JSON."""
obj, created = CoreSettings.objects.get_or_create(
key="backup_settings",
defaults={"name": "Backup Settings", "value": DEFAULTS.copy()}
)
current = obj.value if isinstance(obj.value, dict) else {}
current.update(updates)
obj.value = current
obj.save()
def get_schedule_settings() -> dict:
"""Get all backup schedule settings."""
settings = _get_backup_settings()
return {
"enabled": bool(settings.get("schedule_enabled", DEFAULTS["schedule_enabled"])),
"frequency": str(settings.get("schedule_frequency", DEFAULTS["schedule_frequency"])),
"time": str(settings.get("schedule_time", DEFAULTS["schedule_time"])),
"day_of_week": int(settings.get("schedule_day_of_week", DEFAULTS["schedule_day_of_week"])),
"retention_count": int(settings.get("retention_count", DEFAULTS["retention_count"])),
"cron_expression": str(settings.get("schedule_cron_expression", DEFAULTS["schedule_cron_expression"])),
}
def update_schedule_settings(data: dict) -> dict:
"""Update backup schedule settings and sync the PeriodicTask."""
# Validate
if "frequency" in data and data["frequency"] not in ("daily", "weekly"):
raise ValueError("frequency must be 'daily' or 'weekly'")
if "time" in data:
try:
hour, minute = data["time"].split(":")
int(hour)
int(minute)
except (ValueError, AttributeError):
raise ValueError("time must be in HH:MM format")
if "day_of_week" in data:
day = int(data["day_of_week"])
if day < 0 or day > 6:
raise ValueError("day_of_week must be 0-6 (Sunday-Saturday)")
if "retention_count" in data:
count = int(data["retention_count"])
if count < 0:
raise ValueError("retention_count must be >= 0")
# Update settings with proper key names
updates = {}
if "enabled" in data:
updates["schedule_enabled"] = bool(data["enabled"])
if "frequency" in data:
updates["schedule_frequency"] = str(data["frequency"])
if "time" in data:
updates["schedule_time"] = str(data["time"])
if "day_of_week" in data:
updates["schedule_day_of_week"] = int(data["day_of_week"])
if "retention_count" in data:
updates["retention_count"] = int(data["retention_count"])
if "cron_expression" in data:
updates["schedule_cron_expression"] = str(data["cron_expression"])
_update_backup_settings(updates)
# Sync the periodic task
_sync_periodic_task()
return get_schedule_settings()
def _sync_periodic_task() -> None:
"""Create, update, or delete the scheduled backup task based on settings."""
settings = get_schedule_settings()
if not settings["enabled"]:
# Delete the task if it exists
task = PeriodicTask.objects.filter(name=BACKUP_SCHEDULE_TASK_NAME).first()
if task:
old_crontab = task.crontab
task.delete()
_cleanup_orphaned_crontab(old_crontab)
logger.info("Backup schedule disabled, removed periodic task")
return
# Get old crontab before creating new one
old_crontab = None
try:
old_task = PeriodicTask.objects.get(name=BACKUP_SCHEDULE_TASK_NAME)
old_crontab = old_task.crontab
except PeriodicTask.DoesNotExist:
pass
# Check if using cron expression (advanced mode)
if settings["cron_expression"]:
# Parse cron expression: "minute hour day month weekday"
try:
parts = settings["cron_expression"].split()
if len(parts) != 5:
raise ValueError("Cron expression must have 5 parts: minute hour day month weekday")
minute, hour, day_of_month, month_of_year, day_of_week = parts
crontab, _ = CrontabSchedule.objects.get_or_create(
minute=minute,
hour=hour,
day_of_week=day_of_week,
day_of_month=day_of_month,
month_of_year=month_of_year,
timezone=CoreSettings.get_system_time_zone(),
)
except Exception as e:
logger.error(f"Invalid cron expression '{settings['cron_expression']}': {e}")
raise ValueError(f"Invalid cron expression: {e}")
else:
# Use simple frequency-based scheduling
# Parse time
hour, minute = settings["time"].split(":")
# Build crontab based on frequency
system_tz = CoreSettings.get_system_time_zone()
if settings["frequency"] == "daily":
crontab, _ = CrontabSchedule.objects.get_or_create(
minute=minute,
hour=hour,
day_of_week="*",
day_of_month="*",
month_of_year="*",
timezone=system_tz,
)
else: # weekly
crontab, _ = CrontabSchedule.objects.get_or_create(
minute=minute,
hour=hour,
day_of_week=str(settings["day_of_week"]),
day_of_month="*",
month_of_year="*",
timezone=system_tz,
)
# Create or update the periodic task
task, created = PeriodicTask.objects.update_or_create(
name=BACKUP_SCHEDULE_TASK_NAME,
defaults={
"task": "apps.backups.tasks.scheduled_backup_task",
"crontab": crontab,
"enabled": True,
"kwargs": json.dumps({"retention_count": settings["retention_count"]}),
},
)
# Clean up old crontab if it changed and is orphaned
if old_crontab and old_crontab.id != crontab.id:
_cleanup_orphaned_crontab(old_crontab)
action = "Created" if created else "Updated"
logger.info(f"{action} backup schedule: {settings['frequency']} at {settings['time']}")
def _cleanup_orphaned_crontab(crontab_schedule):
"""Delete old CrontabSchedule if no other tasks are using it."""
if crontab_schedule is None:
return
# Check if any other tasks are using this crontab
if PeriodicTask.objects.filter(crontab=crontab_schedule).exists():
logger.debug(f"CrontabSchedule {crontab_schedule.id} still in use, not deleting")
return
logger.debug(f"Cleaning up orphaned CrontabSchedule: {crontab_schedule.id}")
crontab_schedule.delete()
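For illustration, the advanced path splits an expression such as "30 3 * * 1" into the five crontab fields in the order the parser expects:

minute, hour, day_of_month, month_of_year, day_of_week = "30 3 * * 1".split()
# minute="30", hour="3", day_of_month="*", month_of_year="*", day_of_week="1"
# i.e. 03:30 every Monday, consistent with this module's 0=Sunday convention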


@ -1,350 +0,0 @@
import datetime
import json
import os
import shutil
import subprocess
import tempfile
from pathlib import Path
from zipfile import ZipFile, ZIP_DEFLATED
import logging
import pytz
from django.conf import settings
from core.models import CoreSettings
logger = logging.getLogger(__name__)
def get_backup_dir() -> Path:
"""Get the backup directory, creating it if necessary."""
backup_dir = Path(settings.BACKUP_ROOT)
backup_dir.mkdir(parents=True, exist_ok=True)
return backup_dir
def _is_postgresql() -> bool:
"""Check if we're using PostgreSQL."""
return settings.DATABASES["default"]["ENGINE"] == "django.db.backends.postgresql"
def _get_pg_env() -> dict:
"""Get environment variables for PostgreSQL commands."""
db_config = settings.DATABASES["default"]
env = os.environ.copy()
env["PGPASSWORD"] = db_config.get("PASSWORD", "")
return env
def _get_pg_args() -> list[str]:
"""Get common PostgreSQL command arguments."""
db_config = settings.DATABASES["default"]
return [
"-h", db_config.get("HOST", "localhost"),
"-p", str(db_config.get("PORT", 5432)),
"-U", db_config.get("USER", "postgres"),
"-d", db_config.get("NAME", "dispatcharr"),
]
def _dump_postgresql(output_file: Path) -> None:
"""Dump PostgreSQL database using pg_dump."""
logger.info("Dumping PostgreSQL database with pg_dump...")
cmd = [
"pg_dump",
*_get_pg_args(),
"-Fc", # Custom format for pg_restore
"-v", # Verbose
"-f", str(output_file),
]
result = subprocess.run(
cmd,
env=_get_pg_env(),
capture_output=True,
text=True,
)
if result.returncode != 0:
logger.error(f"pg_dump failed: {result.stderr}")
raise RuntimeError(f"pg_dump failed: {result.stderr}")
logger.debug(f"pg_dump output: {result.stderr}")
def _clean_postgresql_schema() -> None:
"""Drop and recreate the public schema to ensure a completely clean restore."""
logger.info("[PG_CLEAN] Dropping and recreating public schema...")
# Commands to drop and recreate schema
sql_commands = "DROP SCHEMA IF EXISTS public CASCADE; CREATE SCHEMA public; GRANT ALL ON SCHEMA public TO public;"
cmd = [
"psql",
*_get_pg_args(),
"-c", sql_commands,
]
result = subprocess.run(
cmd,
env=_get_pg_env(),
capture_output=True,
text=True,
)
if result.returncode != 0:
logger.error(f"[PG_CLEAN] Failed to clean schema: {result.stderr}")
raise RuntimeError(f"Failed to clean PostgreSQL schema: {result.stderr}")
logger.info("[PG_CLEAN] Schema cleaned successfully")
def _restore_postgresql(dump_file: Path) -> None:
"""Restore PostgreSQL database using pg_restore."""
logger.info("[PG_RESTORE] Starting pg_restore...")
logger.info(f"[PG_RESTORE] Dump file: {dump_file}")
# Drop and recreate schema to ensure a completely clean restore
_clean_postgresql_schema()
pg_args = _get_pg_args()
logger.info(f"[PG_RESTORE] Connection args: {pg_args}")
cmd = [
"pg_restore",
"--no-owner", # Skip ownership commands (we already created schema)
*pg_args,
"-v", # Verbose
str(dump_file),
]
logger.info(f"[PG_RESTORE] Running command: {' '.join(cmd)}")
result = subprocess.run(
cmd,
env=_get_pg_env(),
capture_output=True,
text=True,
)
logger.info(f"[PG_RESTORE] Return code: {result.returncode}")
# pg_restore may return non-zero even on partial success
# Check for actual errors vs warnings
if result.returncode != 0:
# Some errors during restore are expected (e.g., "does not exist" when cleaning)
# Only fail on critical errors
stderr = result.stderr.lower()
if "fatal" in stderr or "could not connect" in stderr:
logger.error(f"[PG_RESTORE] Failed critically: {result.stderr}")
raise RuntimeError(f"pg_restore failed: {result.stderr}")
else:
logger.warning(f"[PG_RESTORE] Completed with warnings: {result.stderr[:500]}...")
logger.info("[PG_RESTORE] Completed successfully")
def _dump_sqlite(output_file: Path) -> None:
"""Dump SQLite database using sqlite3 .backup command."""
logger.info("Dumping SQLite database with sqlite3 .backup...")
db_path = Path(settings.DATABASES["default"]["NAME"])
if not db_path.exists():
raise FileNotFoundError(f"SQLite database not found: {db_path}")
# Use sqlite3 .backup command via stdin for reliable execution
result = subprocess.run(
["sqlite3", str(db_path)],
input=f".backup '{output_file}'\n",
capture_output=True,
text=True,
)
if result.returncode != 0:
logger.error(f"sqlite3 backup failed: {result.stderr}")
raise RuntimeError(f"sqlite3 backup failed: {result.stderr}")
# Verify the backup file was created
if not output_file.exists():
raise RuntimeError("sqlite3 backup failed: output file not created")
logger.info(f"sqlite3 backup completed successfully: {output_file}")
def _restore_sqlite(dump_file: Path) -> None:
"""Restore SQLite database by replacing the database file."""
logger.info("Restoring SQLite database...")
db_path = Path(settings.DATABASES["default"]["NAME"])
backup_current = None
# Backup current database before overwriting
if db_path.exists():
backup_current = db_path.with_suffix(".db.bak")
shutil.copy2(db_path, backup_current)
logger.info(f"Backed up current database to {backup_current}")
# Ensure parent directory exists
db_path.parent.mkdir(parents=True, exist_ok=True)
# The backup file from _dump_sqlite is a complete SQLite database file
# We can simply copy it over the existing database
shutil.copy2(dump_file, db_path)
# Verify the restore worked by checking if sqlite3 can read it
result = subprocess.run(
["sqlite3", str(db_path)],
input=".tables\n",
capture_output=True,
text=True,
)
if result.returncode != 0:
logger.error(f"sqlite3 verification failed: {result.stderr}")
# Try to restore from backup
if backup_current and backup_current.exists():
shutil.copy2(backup_current, db_path)
logger.info("Restored original database from backup")
raise RuntimeError(f"sqlite3 restore verification failed: {result.stderr}")
logger.info("sqlite3 restore completed successfully")
def create_backup() -> Path:
"""
Create a backup archive containing database dump and data directories.
Returns the path to the created backup file.
"""
backup_dir = get_backup_dir()
# Use system timezone for filename (user-friendly), but keep internal timestamps as UTC
system_tz_name = CoreSettings.get_system_time_zone()
try:
system_tz = pytz.timezone(system_tz_name)
now_local = datetime.datetime.now(datetime.UTC).astimezone(system_tz)
timestamp = now_local.strftime("%Y.%m.%d.%H.%M.%S")
except Exception as e:
logger.warning(f"Failed to use system timezone {system_tz_name}: {e}, falling back to UTC")
timestamp = datetime.datetime.now(datetime.UTC).strftime("%Y.%m.%d.%H.%M.%S")
backup_name = f"dispatcharr-backup-{timestamp}.zip"
backup_file = backup_dir / backup_name
logger.info(f"Creating backup: {backup_name}")
with tempfile.TemporaryDirectory(prefix="dispatcharr-backup-") as temp_dir:
temp_path = Path(temp_dir)
# Determine database type and dump accordingly
if _is_postgresql():
db_dump_file = temp_path / "database.dump"
_dump_postgresql(db_dump_file)
db_type = "postgresql"
else:
db_dump_file = temp_path / "database.sqlite3"
_dump_sqlite(db_dump_file)
db_type = "sqlite"
# Create ZIP archive with compression and ZIP64 support for large files
with ZipFile(backup_file, "w", compression=ZIP_DEFLATED, allowZip64=True) as zip_file:
# Add database dump
zip_file.write(db_dump_file, db_dump_file.name)
# Add metadata
metadata = {
"format": "dispatcharr-backup",
"version": 2,
"database_type": db_type,
"database_file": db_dump_file.name,
"created_at": datetime.datetime.now(datetime.UTC).isoformat(),
}
zip_file.writestr("metadata.json", json.dumps(metadata, indent=2))
logger.info(f"Backup created successfully: {backup_file}")
return backup_file
def restore_backup(backup_file: Path) -> None:
"""
Restore from a backup archive.
WARNING: This will overwrite the database!
"""
if not backup_file.exists():
raise FileNotFoundError(f"Backup file not found: {backup_file}")
logger.info(f"Restoring from backup: {backup_file}")
with tempfile.TemporaryDirectory(prefix="dispatcharr-restore-") as temp_dir:
temp_path = Path(temp_dir)
# Extract backup
logger.debug("Extracting backup archive...")
with ZipFile(backup_file, "r") as zip_file:
zip_file.extractall(temp_path)
# Read metadata
metadata_file = temp_path / "metadata.json"
if not metadata_file.exists():
raise ValueError("Invalid backup: missing metadata.json")
with open(metadata_file) as f:
metadata = json.load(f)
# Restore database
_restore_database(temp_path, metadata)
logger.info("Restore completed successfully")
def _restore_database(temp_path: Path, metadata: dict) -> None:
"""Restore database from backup."""
db_type = metadata.get("database_type", "postgresql")
db_file = metadata.get("database_file", "database.dump")
dump_file = temp_path / db_file
if not dump_file.exists():
raise ValueError(f"Invalid backup: missing {db_file}")
current_db_type = "postgresql" if _is_postgresql() else "sqlite"
if db_type != current_db_type:
raise ValueError(
f"Database type mismatch: backup is {db_type}, "
f"but current database is {current_db_type}"
)
if db_type == "postgresql":
_restore_postgresql(dump_file)
else:
_restore_sqlite(dump_file)
def list_backups() -> list[dict]:
"""List all available backup files with metadata."""
backup_dir = get_backup_dir()
backups = []
for backup_file in sorted(backup_dir.glob("dispatcharr-backup-*.zip"), reverse=True):
# Use UTC timezone so frontend can convert to user's local time
created_time = datetime.datetime.fromtimestamp(backup_file.stat().st_mtime, datetime.UTC)
backups.append({
"name": backup_file.name,
"size": backup_file.stat().st_size,
"created": created_time.isoformat(),
})
return backups
def delete_backup(filename: str) -> None:
"""Delete a backup file."""
backup_dir = get_backup_dir()
backup_file = backup_dir / filename
if not backup_file.exists():
raise FileNotFoundError(f"Backup file not found: {filename}")
if not backup_file.is_file():
raise ValueError(f"Invalid backup file: {filename}")
backup_file.unlink()
logger.info(f"Deleted backup: {filename}")


@ -1,106 +0,0 @@
import logging
import traceback
from celery import shared_task
from . import services
logger = logging.getLogger(__name__)
def _cleanup_old_backups(retention_count: int) -> int:
"""Delete old backups, keeping only the most recent N. Returns count deleted."""
if retention_count <= 0:
return 0
backups = services.list_backups()
if len(backups) <= retention_count:
return 0
# Backups are sorted newest first, so delete from the end
to_delete = backups[retention_count:]
deleted = 0
for backup in to_delete:
try:
services.delete_backup(backup["name"])
deleted += 1
logger.info(f"[CLEANUP] Deleted old backup: {backup['name']}")
except Exception as e:
logger.error(f"[CLEANUP] Failed to delete {backup['name']}: {e}")
return deleted
@shared_task(bind=True)
def create_backup_task(self):
"""Celery task to create a backup asynchronously."""
try:
logger.info(f"[BACKUP] Starting backup task {self.request.id}")
backup_file = services.create_backup()
logger.info(f"[BACKUP] Task {self.request.id} completed: {backup_file.name}")
return {
"status": "completed",
"filename": backup_file.name,
"size": backup_file.stat().st_size,
}
except Exception as e:
logger.error(f"[BACKUP] Task {self.request.id} failed: {str(e)}")
logger.error(f"[BACKUP] Traceback: {traceback.format_exc()}")
return {
"status": "failed",
"error": str(e),
}
@shared_task(bind=True)
def restore_backup_task(self, filename: str):
"""Celery task to restore a backup asynchronously."""
try:
logger.info(f"[RESTORE] Starting restore task {self.request.id} for {filename}")
backup_dir = services.get_backup_dir()
backup_file = backup_dir / filename
logger.info(f"[RESTORE] Backup file path: {backup_file}")
services.restore_backup(backup_file)
logger.info(f"[RESTORE] Task {self.request.id} completed successfully")
return {
"status": "completed",
"filename": filename,
}
except Exception as e:
logger.error(f"[RESTORE] Task {self.request.id} failed: {str(e)}")
logger.error(f"[RESTORE] Traceback: {traceback.format_exc()}")
return {
"status": "failed",
"error": str(e),
}
@shared_task(bind=True)
def scheduled_backup_task(self, retention_count: int = 0):
"""Celery task for scheduled backups with optional retention cleanup."""
try:
logger.info(f"[SCHEDULED] Starting scheduled backup task {self.request.id}")
# Create backup
backup_file = services.create_backup()
logger.info(f"[SCHEDULED] Backup created: {backup_file.name}")
# Cleanup old backups if retention is set
deleted = 0
if retention_count > 0:
deleted = _cleanup_old_backups(retention_count)
logger.info(f"[SCHEDULED] Cleanup complete, deleted {deleted} old backup(s)")
return {
"status": "completed",
"filename": backup_file.name,
"size": backup_file.stat().st_size,
"deleted_count": deleted,
}
except Exception as e:
logger.error(f"[SCHEDULED] Task {self.request.id} failed: {str(e)}")
logger.error(f"[SCHEDULED] Traceback: {traceback.format_exc()}")
return {
"status": "failed",
"error": str(e),
}

File diff suppressed because it is too large


@ -6,21 +6,11 @@ from .api_views import (
ChannelGroupViewSet,
BulkDeleteStreamsAPIView,
BulkDeleteChannelsAPIView,
BulkDeleteLogosAPIView,
CleanupUnusedLogosAPIView,
LogoViewSet,
ChannelProfileViewSet,
UpdateChannelMembershipAPIView,
BulkUpdateChannelMembershipAPIView,
RecordingViewSet,
RecurringRecordingRuleViewSet,
GetChannelStreamsAPIView,
SeriesRulesAPIView,
DeleteSeriesRuleAPIView,
EvaluateSeriesRulesAPIView,
BulkRemoveSeriesRecordingsAPIView,
BulkDeleteUpcomingRecordingsAPIView,
ComskipConfigAPIView,
)
app_name = 'channels' # for DRF routing
@ -32,24 +22,13 @@ router.register(r'channels', ChannelViewSet, basename='channel')
router.register(r'logos', LogoViewSet, basename='logo')
router.register(r'profiles', ChannelProfileViewSet, basename='profile')
router.register(r'recordings', RecordingViewSet, basename='recording')
router.register(r'recurring-rules', RecurringRecordingRuleViewSet, basename='recurring-rule')
urlpatterns = [
# Bulk delete is a single APIView, not a ViewSet
path('streams/bulk-delete/', BulkDeleteStreamsAPIView.as_view(), name='bulk_delete_streams'),
path('channels/bulk-delete/', BulkDeleteChannelsAPIView.as_view(), name='bulk_delete_channels'),
path('logos/bulk-delete/', BulkDeleteLogosAPIView.as_view(), name='bulk_delete_logos'),
path('logos/cleanup/', CleanupUnusedLogosAPIView.as_view(), name='cleanup_unused_logos'),
path('channels/<int:channel_id>/streams/', GetChannelStreamsAPIView.as_view(), name='get_channel_streams'),
path('profiles/<int:profile_id>/channels/<int:channel_id>/', UpdateChannelMembershipAPIView.as_view(), name='update_channel_membership'),
path('profiles/<int:profile_id>/channels/bulk-update/', BulkUpdateChannelMembershipAPIView.as_view(), name='bulk_update_channel_membership'),
# DVR series rules (order matters: specific routes before catch-all slug)
path('series-rules/', SeriesRulesAPIView.as_view(), name='series_rules'),
path('series-rules/evaluate/', EvaluateSeriesRulesAPIView.as_view(), name='evaluate_series_rules'),
path('series-rules/bulk-remove/', BulkRemoveSeriesRecordingsAPIView.as_view(), name='bulk_remove_series_recordings'),
path('series-rules/<path:tvg_id>/', DeleteSeriesRuleAPIView.as_view(), name='delete_series_rule'),
path('recordings/bulk-delete-upcoming/', BulkDeleteUpcomingRecordingsAPIView.as_view(), name='bulk_delete_upcoming_recordings'),
path('dvr/comskip-config/', ComskipConfigAPIView.as_view(), name='comskip_config'),
]
urlpatterns += router.urls
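The <path:tvg_id> converter on the series-rules delete route matters because tvg_id values may contain slashes, which the default <str:...> converter rejects; it is also why the specific routes are registered before the catch-all. A standalone sketch of the ordering, with illustrative stand-in views:

from django.http import JsonResponse
from django.urls import path

def evaluate(request):
    return JsonResponse({"route": "evaluate"})

def delete_rule(request, tvg_id):
    return JsonResponse({"route": "delete", "tvg_id": tvg_id})

urlpatterns = [
    # Specific route first; otherwise the catch-all below would match
    # 'series-rules/evaluate/' with tvg_id='evaluate'.
    path("series-rules/evaluate/", evaluate),
    path("series-rules/<path:tvg_id>/", delete_rule),  # <path:> permits '/'
]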

File diff suppressed because it is too large


@ -14,13 +14,6 @@ class ChannelGroupForm(forms.ModelForm):
# Channel Form
#
class ChannelForm(forms.ModelForm):
# Explicitly define channel_number as FloatField to ensure decimal values work
channel_number = forms.FloatField(
required=False,
widget=forms.NumberInput(attrs={'step': '0.1'}), # Allow decimal steps
help_text="Channel number can include decimals (e.g., 1.1, 2.5)"
)
channel_group = forms.ModelChoiceField(
queryset=ChannelGroup.objects.all(),
required=False,


@ -1,38 +0,0 @@
# Generated by Django 5.1.6 on 2025-04-18 16:21
from django.db import migrations, models
from django.db.models import Count
def remove_duplicate_channel_streams(apps, schema_editor):
ChannelStream = apps.get_model('dispatcharr_channels', 'ChannelStream')
# Find duplicates by (channel, stream)
duplicates = (
ChannelStream.objects
.values('channel', 'stream')
.annotate(count=Count('id'))
.filter(count__gt=1)
)
for dupe in duplicates:
# Get all duplicates for this pair
dups = ChannelStream.objects.filter(
channel=dupe['channel'],
stream=dupe['stream']
).order_by('id')
# Keep the first one, delete the rest
dups.exclude(id=dups.first().id).delete()
class Migration(migrations.Migration):
dependencies = [
('dispatcharr_channels', '0015_recording_custom_properties'),
]
operations = [
migrations.RunPython(remove_duplicate_channel_streams),
migrations.AddConstraint(
model_name='channelstream',
constraint=models.UniqueConstraint(fields=('channel', 'stream'), name='unique_channel_stream'),
),
]


@ -1,18 +0,0 @@
# Generated by Django 5.1.6 on 2025-04-21 20:47
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('dispatcharr_channels', '0016_channelstream_unique_channel_stream'),
]
operations = [
migrations.AlterField(
model_name='channelgroup',
name='name',
field=models.TextField(db_index=True, unique=True),
),
]


@ -1,18 +0,0 @@
# Generated by Django 5.1.6 on 2025-04-27 14:12
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('dispatcharr_channels', '0017_alter_channelgroup_name'),
]
operations = [
migrations.AddField(
model_name='channelgroupm3uaccount',
name='custom_properties',
field=models.TextField(blank=True, null=True),
),
]


@ -1,18 +0,0 @@
# Generated by Django 5.1.6 on 2025-05-04 00:02
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('dispatcharr_channels', '0018_channelgroupm3uaccount_custom_properties_and_more'),
]
operations = [
migrations.AddField(
model_name='channel',
name='tvc_guide_stationid',
field=models.CharField(blank=True, max_length=255, null=True),
),
]


@ -1,18 +0,0 @@
# Generated by Django 5.1.6 on 2025-05-15 19:37
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('dispatcharr_channels', '0019_channel_tvc_guide_stationid'),
]
operations = [
migrations.AlterField(
model_name='channel',
name='channel_number',
field=models.FloatField(db_index=True),
),
]


@ -1,18 +0,0 @@
# Generated by Django 5.1.6 on 2025-05-18 14:31
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('dispatcharr_channels', '0020_alter_channel_channel_number'),
]
operations = [
migrations.AddField(
model_name='channel',
name='user_level',
field=models.IntegerField(default=0),
),
]


@ -1,35 +0,0 @@
# Generated by Django 5.1.6 on 2025-07-13 23:08
import django.db.models.deletion
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('dispatcharr_channels', '0021_channel_user_level'),
('m3u', '0012_alter_m3uaccount_refresh_interval'),
]
operations = [
migrations.AddField(
model_name='channel',
name='auto_created',
field=models.BooleanField(default=False, help_text='Whether this channel was automatically created via M3U auto channel sync'),
),
migrations.AddField(
model_name='channel',
name='auto_created_by',
field=models.ForeignKey(blank=True, help_text='The M3U account that auto-created this channel', null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='auto_created_channels', to='m3u.m3uaccount'),
),
migrations.AddField(
model_name='channelgroupm3uaccount',
name='auto_channel_sync',
field=models.BooleanField(default=False, help_text='Automatically create/delete channels to match streams in this group'),
),
migrations.AddField(
model_name='channelgroupm3uaccount',
name='auto_sync_channel_start',
field=models.FloatField(blank=True, help_text='Starting channel number for auto-created channels in this group', null=True),
),
]


@ -1,23 +0,0 @@
# Generated by Django 5.1.6 on 2025-07-29 02:39
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('dispatcharr_channels', '0022_channel_auto_created_channel_auto_created_by_and_more'),
]
operations = [
migrations.AddField(
model_name='stream',
name='stream_stats',
field=models.JSONField(blank=True, help_text='JSON object containing stream statistics like video codec, resolution, etc.', null=True),
),
migrations.AddField(
model_name='stream',
name='stream_stats_updated_at',
field=models.DateTimeField(blank=True, db_index=True, help_text='When stream statistics were last updated', null=True),
),
]


@ -1,19 +0,0 @@
# Generated by Django 5.2.4 on 2025-08-22 20:14
import django.db.models.deletion
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('dispatcharr_channels', '0023_stream_stream_stats_stream_stream_stats_updated_at'),
]
operations = [
migrations.AlterField(
model_name='channelgroupm3uaccount',
name='channel_group',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='m3u_accounts', to='dispatcharr_channels.channelgroup'),
),
]


@ -1,28 +0,0 @@
# Generated by Django 5.2.4 on 2025-09-02 14:30
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('dispatcharr_channels', '0024_alter_channelgroupm3uaccount_channel_group'),
]
operations = [
migrations.AlterField(
model_name='channelgroupm3uaccount',
name='custom_properties',
field=models.JSONField(blank=True, default=dict, null=True),
),
migrations.AlterField(
model_name='recording',
name='custom_properties',
field=models.JSONField(blank=True, default=dict, null=True),
),
migrations.AlterField(
model_name='stream',
name='custom_properties',
field=models.JSONField(blank=True, default=dict, null=True),
),
]


@ -1,31 +0,0 @@
# Generated by Django 5.0.14 on 2025-09-18 14:56
import django.db.models.deletion
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('dispatcharr_channels', '0025_alter_channelgroupm3uaccount_custom_properties_and_more'),
]
operations = [
migrations.CreateModel(
name='RecurringRecordingRule',
fields=[
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('days_of_week', models.JSONField(default=list)),
('start_time', models.TimeField()),
('end_time', models.TimeField()),
('enabled', models.BooleanField(default=True)),
('name', models.CharField(blank=True, max_length=255)),
('created_at', models.DateTimeField(auto_now_add=True)),
('updated_at', models.DateTimeField(auto_now=True)),
('channel', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='recurring_rules', to='dispatcharr_channels.channel')),
],
options={
'ordering': ['channel', 'start_time'],
},
),
]


@ -1,23 +0,0 @@
# Generated by Django 5.2.4 on 2025-10-05 20:50
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('dispatcharr_channels', '0026_recurringrecordingrule'),
]
operations = [
migrations.AddField(
model_name='recurringrecordingrule',
name='end_date',
field=models.DateField(blank=True, null=True),
),
migrations.AddField(
model_name='recurringrecordingrule',
name='start_date',
field=models.DateField(blank=True, null=True),
),
]


@ -1,25 +0,0 @@
# Generated by Django 5.2.4 on 2025-10-06 22:55
import django.utils.timezone
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('dispatcharr_channels', '0027_recurringrecordingrule_end_date_and_more'),
]
operations = [
migrations.AddField(
model_name='channel',
name='created_at',
field=models.DateTimeField(auto_now_add=True, default=django.utils.timezone.now, help_text='Timestamp when this channel was created'),
preserve_default=False,
),
migrations.AddField(
model_name='channel',
name='updated_at',
field=models.DateTimeField(auto_now=True, help_text='Timestamp when this channel was last updated'),
),
]


@ -1,54 +0,0 @@
# Generated migration to backfill stream_hash for existing custom streams
from django.db import migrations
import hashlib
def backfill_custom_stream_hashes(apps, schema_editor):
"""
Generate stream_hash for all custom streams that don't have one.
Uses stream ID to create a stable hash that won't change when name/url is edited.
"""
Stream = apps.get_model('dispatcharr_channels', 'Stream')
custom_streams_without_hash = Stream.objects.filter(
is_custom=True,
stream_hash__isnull=True
)
updated_count = 0
for stream in custom_streams_without_hash:
# Generate a stable hash using the stream's ID
# This ensures the hash never changes even if name/url is edited
unique_string = f"custom_stream_{stream.id}"
stream.stream_hash = hashlib.sha256(unique_string.encode()).hexdigest()
stream.save(update_fields=['stream_hash'])
updated_count += 1
if updated_count > 0:
print(f"Backfilled stream_hash for {updated_count} custom streams")
else:
print("No custom streams needed stream_hash backfill")
def reverse_backfill(apps, schema_editor):
"""
Reverse migration - clear stream_hash for custom streams.
Note: This will break preview functionality for custom streams.
"""
Stream = apps.get_model('dispatcharr_channels', 'Stream')
custom_streams = Stream.objects.filter(is_custom=True)
count = custom_streams.update(stream_hash=None)
print(f"Cleared stream_hash for {count} custom streams")
class Migration(migrations.Migration):
dependencies = [
('dispatcharr_channels', '0028_channel_created_at_channel_updated_at'),
]
operations = [
migrations.RunPython(backfill_custom_stream_hashes, reverse_backfill),
]


@ -1,18 +0,0 @@
# Generated by Django 5.2.4 on 2025-10-28 20:00
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('dispatcharr_channels', '0029_backfill_custom_stream_hashes'),
]
operations = [
migrations.AlterField(
model_name='stream',
name='url',
field=models.URLField(blank=True, max_length=4096, null=True),
),
]


@ -1,29 +0,0 @@
# Generated by Django 5.2.9 on 2026-01-09 18:19
import datetime
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('dispatcharr_channels', '0030_alter_stream_url'),
]
operations = [
migrations.AddField(
model_name='channelgroupm3uaccount',
name='is_stale',
field=models.BooleanField(db_index=True, default=False, help_text='Whether this group relationship is stale (not seen in recent refresh, pending deletion)'),
),
migrations.AddField(
model_name='channelgroupm3uaccount',
name='last_seen',
field=models.DateTimeField(db_index=True, default=datetime.datetime.now, help_text='Last time this group was seen in the M3U source during a refresh'),
),
migrations.AddField(
model_name='stream',
name='is_stale',
field=models.BooleanField(db_index=True, default=False, help_text='Whether this stream is stale (not seen in recent refresh, pending deletion)'),
),
]


@ -1,5 +1,6 @@
from django.db import models
from django.core.exceptions import ValidationError
from core.models import StreamProfile
from django.conf import settings
from core.models import StreamProfile, CoreSettings
from core.utils import RedisClient
@ -9,14 +10,12 @@ from datetime import datetime
import hashlib
import json
from apps.epg.models import EPGData
from apps.accounts.models import User
logger = logging.getLogger(__name__)
# If you have an M3UAccount model in apps.m3u, you can still import it:
from apps.m3u.models import M3UAccount
# Add fallback functions if Redis isn't available
def get_total_viewers(channel_id):
"""Get viewer count from Redis or return 0 if Redis isn't available"""
@ -27,9 +26,8 @@ def get_total_viewers(channel_id):
except Exception:
return 0
class ChannelGroup(models.Model):
name = models.TextField(unique=True, db_index=True)
name = models.CharField(max_length=100, unique=True)
def related_channels(self):
# local import if needed to avoid circular imports; usually fine in a single file though
@ -48,14 +46,12 @@ class ChannelGroup(models.Model):
return created_objects
class Stream(models.Model):
"""
Represents a single stream (e.g. from an M3U source or custom URL).
"""
name = models.CharField(max_length=255, default="Default Stream")
url = models.URLField(max_length=4096, blank=True, null=True)
url = models.URLField(max_length=2000, blank=True, null=True)
m3u_account = models.ForeignKey(
M3UAccount,
on_delete=models.CASCADE,
@ -65,7 +61,7 @@ class Stream(models.Model):
)
logo_url = models.TextField(blank=True, null=True)
tvg_id = models.CharField(max_length=255, blank=True, null=True)
local_file = models.FileField(upload_to="uploads/", blank=True, null=True)
local_file = models.FileField(upload_to='uploads/', blank=True, null=True)
current_viewers = models.PositiveIntegerField(default=0)
updated_at = models.DateTimeField(auto_now=True)
channel_group = models.ForeignKey(
@ -73,18 +69,18 @@ class Stream(models.Model):
on_delete=models.SET_NULL,
null=True,
blank=True,
related_name="streams",
related_name='streams'
)
stream_profile = models.ForeignKey(
StreamProfile,
null=True,
blank=True,
on_delete=models.SET_NULL,
related_name="streams",
related_name='streams'
)
is_custom = models.BooleanField(
default=False,
help_text="Whether this is a user-created stream or from an M3U account",
help_text="Whether this is a user-created stream or from an M3U account"
)
stream_hash = models.CharField(
max_length=255,
@ -94,48 +90,30 @@ class Stream(models.Model):
db_index=True,
)
last_seen = models.DateTimeField(db_index=True, default=datetime.now)
is_stale = models.BooleanField(
default=False,
db_index=True,
help_text="Whether this stream is stale (not seen in recent refresh, pending deletion)"
)
custom_properties = models.JSONField(default=dict, blank=True, null=True)
# Stream statistics fields
stream_stats = models.JSONField(
null=True,
blank=True,
help_text="JSON object containing stream statistics like video codec, resolution, etc."
)
stream_stats_updated_at = models.DateTimeField(
null=True,
blank=True,
help_text="When stream statistics were last updated",
db_index=True
)
custom_properties = models.TextField(null=True, blank=True)
class Meta:
# If you use m3u_account, you might do unique_together = ('name','url','m3u_account')
verbose_name = "Stream"
verbose_name_plural = "Streams"
ordering = ["-updated_at"]
ordering = ['-updated_at']
def __str__(self):
return self.name or self.url or f"Stream ID {self.id}"
@classmethod
def generate_hash_key(cls, name, url, tvg_id, keys=None, m3u_id=None, group=None):
def generate_hash_key(cls, name, url, tvg_id, keys=None):
if keys is None:
keys = CoreSettings.get_m3u_hash_key().split(",")
stream_parts = {"name": name, "url": url, "tvg_id": tvg_id, "m3u_id": m3u_id, "group": group}
stream_parts = {
"name": name, "url": url, "tvg_id": tvg_id
}
hash_parts = {key: stream_parts[key] for key in keys if key in stream_parts}
# Serialize and hash the dictionary
serialized_obj = json.dumps(
hash_parts, sort_keys=True
) # sort_keys ensures consistent ordering
serialized_obj = json.dumps(hash_parts, sort_keys=True) # sort_keys ensures consistent ordering
hash_object = hashlib.sha256(serialized_obj.encode())
return hash_object.hexdigest()
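On the main side the candidate parts also include m3u_id and group, but only the keys named in the CoreSettings hash-key setting actually get hashed, and json.dumps(sort_keys=True) keeps the digest order-independent. A hedged usage sketch (the key list shown is illustrative):

from apps.channels.models import Stream

# Passing keys explicitly skips the CoreSettings lookup.
h = Stream.generate_hash_key(
    name="ESPN",
    url="http://example.com/espn.ts",
    tvg_id="espn.us",
    keys=["name", "url", "tvg_id"],
)
print(h)  # stable 64-char hex digest for this name/url/tvg_id combination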
@ -151,23 +129,13 @@ class Stream(models.Model):
return stream, False # False means it was updated, not created
except cls.DoesNotExist:
# If it doesn't exist, create a new object with the given hash
fields_to_update["stream_hash"] = (
hash_value # Make sure the hash field is set
)
fields_to_update['stream_hash'] = hash_value # Make sure the hash field is set
stream = cls.objects.create(**fields_to_update)
return stream, True # True means it was created
# @TODO: honor stream's stream profile
def get_stream_profile(self):
"""
Get the stream profile for this stream.
Uses the stream's own profile if set, otherwise returns the default.
"""
if self.stream_profile:
return self.stream_profile
stream_profile = StreamProfile.objects.get(
id=CoreSettings.get_default_stream_profile_id()
)
stream_profile = StreamProfile.objects.get(id=CoreSettings.get_default_stream_profile_id())
return stream_profile
@ -185,9 +153,7 @@ class Stream(models.Model):
m3u_account = self.m3u_account
m3u_profiles = m3u_account.profiles.all()
default_profile = next((obj for obj in m3u_profiles if obj.is_default), None)
profiles = [default_profile] + [
obj for obj in m3u_profiles if not obj.is_default
]
profiles = [default_profile] + [obj for obj in m3u_profiles if not obj.is_default]
for profile in profiles:
logger.info(profile)
@ -202,19 +168,13 @@ class Stream(models.Model):
if profile.max_streams == 0 or current_connections < profile.max_streams:
# Start a new stream
redis_client.set(f"channel_stream:{self.id}", self.id)
redis_client.set(
f"stream_profile:{self.id}", profile.id
) # Store only the matched profile
redis_client.set(f"stream_profile:{self.id}", profile.id) # Store only the matched profile
# Increment connection count for profiles with limits
if profile.max_streams > 0:
redis_client.incr(profile_connections_key)
return (
self.id,
profile.id,
None,
) # Return newly assigned stream and matched profile
return self.id, profile.id, None # Return newly assigned stream and matched profile
# 4. No available streams
return None, None, None
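All of this slot accounting lives in Redis under three key families: channel_stream:{id} for the active stream, stream_profile:{stream_id} for the matched profile, and profile_connections:{profile_id} as a counter. A minimal sketch of the acquire step outside Django, mirroring Stream's own path above and assuming redis-py:

import redis

r = redis.Redis()

def try_acquire(stream_id: int, profile_id: int, max_streams: int) -> bool:
    # max_streams == 0 means unlimited, matching the model logic above.
    counter = f"profile_connections:{profile_id}"
    if max_streams != 0 and int(r.get(counter) or 0) >= max_streams:
        return False
    r.set(f"channel_stream:{stream_id}", stream_id)
    r.set(f"stream_profile:{stream_id}", profile_id)
    if max_streams > 0:
        r.incr(counter)
    return True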
@ -235,9 +195,7 @@ class Stream(models.Model):
redis_client.delete(f"stream_profile:{stream_id}") # Remove profile association
profile_id = int(profile_id)
logger.debug(
f"Found profile ID {profile_id} associated with stream {stream_id}"
)
logger.debug(f"Found profile ID {profile_id} associated with stream {stream_id}")
profile_connections_key = f"profile_connections:{profile_id}"
@ -246,45 +204,45 @@ class Stream(models.Model):
if current_count > 0:
redis_client.decr(profile_connections_key)
class ChannelManager(models.Manager):
def active(self):
return self.all()
class Channel(models.Model):
channel_number = models.FloatField(db_index=True)
channel_number = models.IntegerField()
name = models.CharField(max_length=255)
logo = models.ForeignKey(
"Logo",
'Logo',
on_delete=models.SET_NULL,
null=True,
blank=True,
related_name="channels",
related_name='channels',
)
# M2M to Stream now in the same file
streams = models.ManyToManyField(
Stream, blank=True, through="ChannelStream", related_name="channels"
Stream,
blank=True,
through='ChannelStream',
related_name='channels'
)
channel_group = models.ForeignKey(
"ChannelGroup",
'ChannelGroup',
on_delete=models.SET_NULL,
null=True,
blank=True,
related_name="channels",
help_text="Channel group this channel belongs to.",
related_name='channels',
help_text="Channel group this channel belongs to."
)
tvg_id = models.CharField(max_length=255, blank=True, null=True)
tvc_guide_stationid = models.CharField(max_length=255, blank=True, null=True)
epg_data = models.ForeignKey(
EPGData,
on_delete=models.SET_NULL,
null=True,
blank=True,
related_name="channels",
related_name='channels'
)
stream_profile = models.ForeignKey(
@ -292,41 +250,16 @@ class Channel(models.Model):
on_delete=models.SET_NULL,
null=True,
blank=True,
related_name="channels",
related_name='channels'
)
uuid = models.UUIDField(
default=uuid.uuid4, editable=False, unique=True, db_index=True
)
user_level = models.IntegerField(default=0)
auto_created = models.BooleanField(
default=False,
help_text="Whether this channel was automatically created via M3U auto channel sync"
)
auto_created_by = models.ForeignKey(
"m3u.M3UAccount",
on_delete=models.SET_NULL,
null=True,
blank=True,
related_name="auto_created_channels",
help_text="The M3U account that auto-created this channel"
)
created_at = models.DateTimeField(
auto_now_add=True,
help_text="Timestamp when this channel was created"
)
updated_at = models.DateTimeField(
auto_now=True,
help_text="Timestamp when this channel was last updated"
)
uuid = models.UUIDField(default=uuid.uuid4, editable=False, unique=True, db_index=True)
def clean(self):
# Enforce unique channel_number within a given group
existing = Channel.objects.filter(
channel_number=self.channel_number, channel_group=self.channel_group
channel_number=self.channel_number,
channel_group=self.channel_group
).exclude(id=self.id)
if existing.exists():
raise ValidationError(
@ -338,7 +271,7 @@ class Channel(models.Model):
@classmethod
def get_next_available_channel_number(cls, starting_from=1):
used_numbers = set(cls.objects.all().values_list("channel_number", flat=True))
used_numbers = set(cls.objects.all().values_list('channel_number', flat=True))
n = starting_from
while n in used_numbers:
n += 1
@ -348,9 +281,7 @@ class Channel(models.Model):
def get_stream_profile(self):
stream_profile = self.stream_profile
if not stream_profile:
stream_profile = StreamProfile.objects.get(
id=CoreSettings.get_default_stream_profile_id()
)
stream_profile = StreamProfile.objects.get(id=CoreSettings.get_default_stream_profile_id())
return stream_profile
@ -380,55 +311,44 @@ class Channel(models.Model):
profile_id = int(profile_id_bytes)
return stream_id, profile_id, None
except (ValueError, TypeError):
logger.debug(
f"Invalid profile ID retrieved from Redis: {profile_id_bytes}"
)
logger.debug(f"Invalid profile ID retrieved from Redis: {profile_id_bytes}")
except (ValueError, TypeError):
logger.debug(
f"Invalid stream ID retrieved from Redis: {stream_id_bytes}"
)
logger.debug(f"Invalid stream ID retrieved from Redis: {stream_id_bytes}")
# No existing active stream, attempt to assign a new one
has_streams_but_maxed_out = False
has_active_profiles = False
# Iterate through channel streams and their profiles
for stream in self.streams.all().order_by("channelstream__order"):
for stream in self.streams.all().order_by('channelstream__order'):
# Retrieve the M3U account associated with the stream.
m3u_account = stream.m3u_account
if not m3u_account:
logger.debug(f"Stream {stream.id} has no M3U account")
continue
if m3u_account.is_active == False:
logger.debug(f"M3U account {m3u_account.id} is inactive, skipping.")
continue
m3u_profiles = m3u_account.profiles.filter(is_active=True)
default_profile = next(
(obj for obj in m3u_profiles if obj.is_default), None
)
m3u_profiles = m3u_account.profiles.all()
default_profile = next((obj for obj in m3u_profiles if obj.is_default), None)
if not default_profile:
logger.debug(f"M3U account {m3u_account.id} has no active default profile")
logger.debug(f"M3U account {m3u_account.id} has no default profile")
continue
profiles = [default_profile] + [
obj for obj in m3u_profiles if not obj.is_default
]
profiles = [default_profile] + [obj for obj in m3u_profiles if not obj.is_default]
for profile in profiles:
# Skip inactive profiles
if not profile.is_active:
logger.debug(f"Skipping inactive profile {profile.id}")
continue
has_active_profiles = True
profile_connections_key = f"profile_connections:{profile.id}"
current_connections = int(
redis_client.get(profile_connections_key) or 0
)
current_connections = int(redis_client.get(profile_connections_key) or 0)
# Check if profile has available slots (or unlimited connections)
if (
profile.max_streams == 0
or current_connections < profile.max_streams
):
if profile.max_streams == 0 or current_connections < profile.max_streams:
# Start a new stream
redis_client.set(f"channel_stream:{self.id}", stream.id)
redis_client.set(f"stream_profile:{stream.id}", profile.id)
@ -437,23 +357,17 @@ class Channel(models.Model):
if profile.max_streams > 0:
redis_client.incr(profile_connections_key)
return (
stream.id,
profile.id,
None,
) # Return newly assigned stream and matched profile
return stream.id, profile.id, None # Return newly assigned stream and matched profile
else:
# This profile is at max connections
has_streams_but_maxed_out = True
logger.debug(
f"Profile {profile.id} at max connections: {current_connections}/{profile.max_streams}"
)
logger.debug(f"Profile {profile.id} at max connections: {current_connections}/{profile.max_streams}")
# No available streams - determine specific reason
if has_streams_but_maxed_out:
error_reason = "All active M3U profiles have reached maximum connection limits"
error_reason = "All M3U profiles have reached maximum connection limits"
elif has_active_profiles:
error_reason = "No compatible active profile found for any assigned stream"
error_reason = "No compatible profile found for any assigned stream"
else:
error_reason = "No active profiles found for any assigned stream"
@ -473,9 +387,7 @@ class Channel(models.Model):
redis_client.delete(f"channel_stream:{self.id}") # Remove active stream
stream_id = int(stream_id)
logger.debug(
f"Found stream ID {stream_id} associated with channel stream {self.id}"
)
logger.debug(f"Found stream ID {stream_id} associated with channel stream {self.id}")
# Get the matched profile for cleanup
profile_id = redis_client.get(f"stream_profile:{stream_id}")
@ -486,9 +398,7 @@ class Channel(models.Model):
redis_client.delete(f"stream_profile:{stream_id}") # Remove profile association
profile_id = int(profile_id)
logger.debug(
f"Found profile ID {profile_id} associated with stream {stream_id}"
)
logger.debug(f"Found profile ID {profile_id} associated with stream {stream_id}")
profile_connections_key = f"profile_connections:{profile_id}"
@ -497,70 +407,17 @@ class Channel(models.Model):
if current_count > 0:
redis_client.decr(profile_connections_key)
def update_stream_profile(self, new_profile_id):
"""
Updates the profile for the current stream and adjusts connection counts.
Args:
new_profile_id: The ID of the new stream profile to use
Returns:
bool: True if successful, False otherwise
"""
redis_client = RedisClient.get_client()
# Get current stream ID
stream_id_bytes = redis_client.get(f"channel_stream:{self.id}")
if not stream_id_bytes:
logger.debug("No active stream found for channel")
return False
stream_id = int(stream_id_bytes)
# Get current profile ID
current_profile_id_bytes = redis_client.get(f"stream_profile:{stream_id}")
if not current_profile_id_bytes:
logger.debug("No profile found for current stream")
return False
current_profile_id = int(current_profile_id_bytes)
# Don't do anything if the profile is already set to the requested one
if current_profile_id == new_profile_id:
return True
# Decrement connection count for old profile
old_profile_connections_key = f"profile_connections:{current_profile_id}"
old_count = int(redis_client.get(old_profile_connections_key) or 0)
if old_count > 0:
redis_client.decr(old_profile_connections_key)
# Update the profile mapping
redis_client.set(f"stream_profile:{stream_id}", new_profile_id)
# Increment connection count for new profile
new_profile_connections_key = f"profile_connections:{new_profile_id}"
redis_client.incr(new_profile_connections_key)
logger.info(
f"Updated stream {stream_id} profile from {current_profile_id} to {new_profile_id}"
)
return True
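A hedged call-site sketch for this main-only method (the IDs are illustrative):

channel = Channel.objects.get(id=7)
if channel.update_stream_profile(new_profile_id=3):
    # The old profile's connection counter was decremented, the new one
    # incremented, and the stream_profile:{stream_id} key rewritten.
    print("profile switched without restarting the stream")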
class ChannelProfile(models.Model):
name = models.CharField(max_length=100, unique=True)
class ChannelProfileMembership(models.Model):
channel_profile = models.ForeignKey(ChannelProfile, on_delete=models.CASCADE)
channel = models.ForeignKey(Channel, on_delete=models.CASCADE)
enabled = models.BooleanField(
default=True
) # Track if the channel is enabled for this group
enabled = models.BooleanField(default=True) # Track if the channel is enabled for this group
class Meta:
unique_together = ("channel_profile", "channel")
unique_together = ('channel_profile', 'channel')
class ChannelStream(models.Model):
channel = models.ForeignKey(Channel, on_delete=models.CASCADE)
@ -568,45 +425,23 @@ class ChannelStream(models.Model):
order = models.PositiveIntegerField(default=0) # Ordering field
class Meta:
ordering = ["order"] # Ensure streams are retrieved in order
constraints = [
models.UniqueConstraint(
fields=["channel", "stream"], name="unique_channel_stream"
)
]
ordering = ['order'] # Ensure streams are retrieved in order
class ChannelGroupM3UAccount(models.Model):
channel_group = models.ForeignKey(
ChannelGroup, on_delete=models.CASCADE, related_name="m3u_accounts"
ChannelGroup,
on_delete=models.CASCADE,
related_name='m3u_account'
)
m3u_account = models.ForeignKey(
M3UAccount, on_delete=models.CASCADE, related_name="channel_group"
M3UAccount,
on_delete=models.CASCADE,
related_name='channel_group'
)
custom_properties = models.JSONField(default=dict, blank=True, null=True)
enabled = models.BooleanField(default=True)
auto_channel_sync = models.BooleanField(
default=False,
help_text='Automatically create/delete channels to match streams in this group'
)
auto_sync_channel_start = models.FloatField(
null=True,
blank=True,
help_text='Starting channel number for auto-created channels in this group'
)
last_seen = models.DateTimeField(
default=datetime.now,
db_index=True,
help_text='Last time this group was seen in the M3U source during a refresh'
)
is_stale = models.BooleanField(
default=False,
db_index=True,
help_text='Whether this group relationship is stale (not seen in recent refresh, pending deletion)'
)
class Meta:
unique_together = ("channel_group", "m3u_account")
unique_together = ('channel_group', 'm3u_account')
def __str__(self):
return f"{self.channel_group.name} - {self.m3u_account.name} (Enabled: {self.enabled})"
@ -619,47 +454,12 @@ class Logo(models.Model):
def __str__(self):
return self.name
class Recording(models.Model):
channel = models.ForeignKey(
"Channel", on_delete=models.CASCADE, related_name="recordings"
)
channel = models.ForeignKey("Channel", on_delete=models.CASCADE, related_name="recordings")
start_time = models.DateTimeField()
end_time = models.DateTimeField()
task_id = models.CharField(max_length=255, null=True, blank=True)
custom_properties = models.JSONField(default=dict, blank=True, null=True)
custom_properties = models.TextField(null=True, blank=True)
def __str__(self):
return f"{self.channel.name} - {self.start_time} to {self.end_time}"
class RecurringRecordingRule(models.Model):
"""Rule describing a recurring manual DVR schedule."""
channel = models.ForeignKey(
"Channel",
on_delete=models.CASCADE,
related_name="recurring_rules",
)
days_of_week = models.JSONField(default=list)
start_time = models.TimeField()
end_time = models.TimeField()
enabled = models.BooleanField(default=True)
name = models.CharField(max_length=255, blank=True)
start_date = models.DateField(null=True, blank=True)
end_date = models.DateField(null=True, blank=True)
created_at = models.DateTimeField(auto_now_add=True)
updated_at = models.DateTimeField(auto_now=True)
class Meta:
ordering = ["channel", "start_time"]
def __str__(self):
channel_name = getattr(self.channel, "name", str(self.channel_id))
return f"Recurring rule for {channel_name}"
def cleaned_days(self):
try:
return sorted({int(d) for d in (self.days_of_week or []) if 0 <= int(d) <= 6})
except Exception:
return []
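cleaned_days de-duplicates, sorts, and drops out-of-range values, but any unparsable entry empties the whole result because the except wraps the entire comprehension. For example:

rule = RecurringRecordingRule(days_of_week=["5", 1, 1, 9])
print(rule.cleaned_days())  # [1, 5] -- 9 is out of range, duplicates collapse

rule.days_of_week = ["not-a-day"]
print(rule.cleaned_days())  # [] -- int() raises, so the except returns []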

View file

@ -1,234 +1,110 @@
import json
from datetime import datetime
from rest_framework import serializers
from .models import (
Stream,
Channel,
ChannelGroup,
ChannelStream,
ChannelGroupM3UAccount,
Logo,
ChannelProfile,
ChannelProfileMembership,
Recording,
RecurringRecordingRule,
)
from .models import Stream, Channel, ChannelGroup, ChannelStream, ChannelGroupM3UAccount, Logo, ChannelProfile, ChannelProfileMembership, Recording
from apps.epg.serializers import EPGDataSerializer
from core.models import StreamProfile
from apps.epg.models import EPGData
from django.urls import reverse
from rest_framework import serializers
from django.utils import timezone
from core.utils import validate_flexible_url
class LogoSerializer(serializers.ModelSerializer):
cache_url = serializers.SerializerMethodField()
channel_count = serializers.SerializerMethodField()
is_used = serializers.SerializerMethodField()
channel_names = serializers.SerializerMethodField()
class Meta:
model = Logo
fields = ["id", "name", "url", "cache_url", "channel_count", "is_used", "channel_names"]
def validate_url(self, value):
"""Validate that the URL is unique for creation or update"""
if self.instance and self.instance.url == value:
return value
if Logo.objects.filter(url=value).exists():
raise serializers.ValidationError("A logo with this URL already exists.")
return value
def create(self, validated_data):
"""Handle logo creation with proper URL validation"""
return Logo.objects.create(**validated_data)
def update(self, instance, validated_data):
"""Handle logo updates"""
for attr, value in validated_data.items():
setattr(instance, attr, value)
instance.save()
return instance
fields = ['id', 'name', 'url', 'cache_url']
def get_cache_url(self, obj):
# return f"/api/channels/logos/{obj.id}/cache/"
request = self.context.get("request")
request = self.context.get('request')
if request:
return request.build_absolute_uri(
reverse("api:channels:logo-cache", args=[obj.id])
)
return reverse("api:channels:logo-cache", args=[obj.id])
def get_channel_count(self, obj):
"""Get the number of channels using this logo"""
return obj.channels.count()
def get_is_used(self, obj):
"""Check if this logo is used by any channels"""
return obj.channels.exists()
def get_channel_names(self, obj):
"""Get the names of channels using this logo (limited to first 5)"""
names = []
# Get channel names
channels = obj.channels.all()[:5]
for channel in channels:
names.append(f"Channel: {channel.name}")
# Calculate total count for "more" message
total_count = self.get_channel_count(obj)
if total_count > 5:
names.append(f"...and {total_count - 5} more")
return names
return request.build_absolute_uri(reverse('api:channels:logo-cache', args=[obj.id]))
return reverse('api:channels:logo-cache', args=[obj.id])
#
# Stream
#
class StreamSerializer(serializers.ModelSerializer):
url = serializers.CharField(
required=False,
allow_blank=True,
allow_null=True,
validators=[validate_flexible_url]
)
stream_profile_id = serializers.PrimaryKeyRelatedField(
queryset=StreamProfile.objects.all(),
source="stream_profile",
source='stream_profile',
allow_null=True,
required=False,
required=False
)
read_only_fields = ["is_custom", "m3u_account", "stream_hash"]
read_only_fields = ['is_custom', 'm3u_account', 'stream_hash']
class Meta:
model = Stream
fields = [
"id",
"name",
"url",
"m3u_account", # Uncomment if using M3U fields
"logo_url",
"tvg_id",
"local_file",
"current_viewers",
"updated_at",
"last_seen",
"is_stale",
"stream_profile_id",
"is_custom",
"channel_group",
"stream_hash",
"stream_stats",
"stream_stats_updated_at",
'id',
'name',
'url',
'm3u_account', # Uncomment if using M3U fields
'logo_url',
'tvg_id',
'local_file',
'current_viewers',
'updated_at',
'stream_profile_id',
'is_custom',
'channel_group',
'stream_hash',
]
def get_fields(self):
fields = super().get_fields()
# Prevent editing specific properties if this stream was created from an M3U account
if (
self.instance
and getattr(self.instance, "m3u_account", None)
and not self.instance.is_custom
):
fields["id"].read_only = True
fields["name"].read_only = True
fields["url"].read_only = True
fields["m3u_account"].read_only = True
fields["tvg_id"].read_only = True
fields["channel_group"].read_only = True
if self.instance and getattr(self.instance, 'm3u_account', None) and not self.instance.is_custom:
fields['id'].read_only = True
fields['name'].read_only = True
fields['url'].read_only = True
fields['m3u_account'].read_only = True
fields['tvg_id'].read_only = True
fields['channel_group'].read_only = True
return fields
class ChannelGroupM3UAccountSerializer(serializers.ModelSerializer):
m3u_accounts = serializers.IntegerField(source="m3u_accounts.id", read_only=True)
enabled = serializers.BooleanField()
auto_channel_sync = serializers.BooleanField(default=False)
auto_sync_channel_start = serializers.FloatField(allow_null=True, required=False)
custom_properties = serializers.JSONField(required=False)
class Meta:
model = ChannelGroupM3UAccount
fields = ["m3u_accounts", "channel_group", "enabled", "auto_channel_sync", "auto_sync_channel_start", "custom_properties", "is_stale", "last_seen"]
def to_representation(self, instance):
data = super().to_representation(instance)
custom_props = instance.custom_properties or {}
return data
def to_internal_value(self, data):
# Accept both dict and JSON string for custom_properties (for backward compatibility)
val = data.get("custom_properties")
if isinstance(val, str):
try:
data["custom_properties"] = json.loads(val)
except Exception:
pass
return super().to_internal_value(data)
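So a client may still post custom_properties as a JSON string rather than an object. A hedged sketch (the field values are illustrative):

payload = {
    "channel_group": 1,
    "enabled": True,
    "custom_properties": '{"group_override": 100}',  # string, not dict
}
ser = ChannelGroupM3UAccountSerializer(data=payload)
ser.is_valid()  # to_internal_value() json.loads()s the string first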
#
# Channel Group
#
class ChannelGroupSerializer(serializers.ModelSerializer):
channel_count = serializers.SerializerMethodField()
m3u_account_count = serializers.SerializerMethodField()
m3u_accounts = ChannelGroupM3UAccountSerializer(
many=True,
read_only=True
)
class Meta:
model = ChannelGroup
fields = ["id", "name", "channel_count", "m3u_account_count", "m3u_accounts"]
def get_channel_count(self, obj):
"""Get count of channels in this group"""
return obj.channels.count()
def get_m3u_account_count(self, obj):
"""Get count of M3U accounts associated with this group"""
return obj.m3u_accounts.count()
fields = ['id', 'name']
class ChannelProfileSerializer(serializers.ModelSerializer):
channels = serializers.SerializerMethodField()
class Meta:
model = ChannelProfile
fields = ["id", "name", "channels"]
fields = ['id', 'name', 'channels']
def get_channels(self, obj):
memberships = ChannelProfileMembership.objects.filter(
channel_profile=obj, enabled=True
)
return [membership.channel.id for membership in memberships]
memberships = ChannelProfileMembership.objects.filter(channel_profile=obj)
return [
{
'id': membership.channel.id,
'enabled': membership.enabled
}
for membership in memberships
]
class ChannelProfileMembershipSerializer(serializers.ModelSerializer):
class Meta:
model = ChannelProfileMembership
fields = ["channel", "enabled"]
fields = ['channel', 'enabled']
class ChanneProfilelMembershipUpdateSerializer(serializers.Serializer):
channel_id = serializers.IntegerField() # Ensure channel_id is an integer
enabled = serializers.BooleanField()
class BulkChannelProfileMembershipSerializer(serializers.Serializer):
channels = serializers.ListField(
child=ChanneProfilelMembershipUpdateSerializer(), # Use the nested serializer
allow_empty=False,
allow_empty=False
)
def validate_channels(self, value):
@ -236,228 +112,145 @@ class BulkChannelProfileMembershipSerializer(serializers.Serializer):
raise serializers.ValidationError("At least one channel must be provided.")
return value
#
# Channel
#
class ChannelSerializer(serializers.ModelSerializer):
# Show nested group data, or ID
# Ensure channel_number is explicitly typed as FloatField and properly validated
channel_number = serializers.FloatField(
allow_null=True,
required=False,
error_messages={"invalid": "Channel number must be a valid decimal number."},
)
channel_number = serializers.IntegerField(allow_null=True, required=False)
channel_group = ChannelGroupSerializer(read_only=True)
channel_group_id = serializers.PrimaryKeyRelatedField(
queryset=ChannelGroup.objects.all(), source="channel_group", required=False
queryset=ChannelGroup.objects.all(),
source="channel_group",
write_only=True,
required=False
)
epg_data = EPGDataSerializer(read_only=True)
epg_data_id = serializers.PrimaryKeyRelatedField(
queryset=EPGData.objects.all(),
source="epg_data",
write_only=True,
required=False,
allow_null=True,
)
stream_profile_id = serializers.PrimaryKeyRelatedField(
queryset=StreamProfile.objects.all(),
source="stream_profile",
source='stream_profile',
allow_null=True,
required=False,
required=False
)
streams = serializers.PrimaryKeyRelatedField(
queryset=Stream.objects.all(), many=True, required=False
streams = serializers.SerializerMethodField()
stream_ids = serializers.PrimaryKeyRelatedField(
queryset=Stream.objects.all(), many=True, write_only=True, required=False
)
logo = LogoSerializer(read_only=True)
logo_id = serializers.PrimaryKeyRelatedField(
queryset=Logo.objects.all(),
source="logo",
source='logo',
allow_null=True,
required=False,
write_only=True,
)
auto_created_by_name = serializers.SerializerMethodField()
class Meta:
model = Channel
fields = [
"id",
"channel_number",
"name",
"channel_group_id",
"tvg_id",
"tvc_guide_stationid",
"epg_data_id",
"streams",
"stream_profile_id",
"uuid",
"logo_id",
"user_level",
"auto_created",
"auto_created_by",
"auto_created_by_name",
'id',
'channel_number',
'name',
'channel_group',
'channel_group_id',
'tvg_id',
'epg_data',
'epg_data_id',
'streams',
'stream_ids',
'stream_profile_id',
'uuid',
'logo',
'logo_id',
]
def to_representation(self, instance):
include_streams = self.context.get("include_streams", False)
if include_streams:
self.fields["streams"] = serializers.SerializerMethodField()
return super().to_representation(instance)
else:
# Fix: For PATCH/PUT responses, ensure streams are ordered
representation = super().to_representation(instance)
if "streams" in representation:
representation["streams"] = list(
instance.streams.all()
.order_by("channelstream__order")
.values_list("id", flat=True)
)
return representation
def get_streams(self, obj):
"""Retrieve ordered stream objects for GET requests."""
ordered_streams = obj.streams.all().order_by('channelstream__order')
return StreamSerializer(ordered_streams, many=True).data
def get_logo(self, obj):
return LogoSerializer(obj.logo).data
def get_streams(self, obj):
"""Retrieve ordered stream IDs for GET requests."""
return StreamSerializer(
obj.streams.all().order_by("channelstream__order"), many=True
).data
# def get_stream_ids(self, obj):
# """Retrieve ordered stream IDs for GET requests."""
# return list(obj.streams.all().order_by('channelstream__order').values_list('id', flat=True))
def create(self, validated_data):
streams = validated_data.pop("streams", [])
channel_number = validated_data.pop(
"channel_number", Channel.get_next_available_channel_number()
)
stream_ids = validated_data.pop('streams', [])
channel_number = validated_data.pop('channel_number', Channel.get_next_available_channel_number())
validated_data["channel_number"] = channel_number
channel = Channel.objects.create(**validated_data)
# Add streams in the specified order
for index, stream in enumerate(streams):
ChannelStream.objects.create(
channel=channel, stream_id=stream.id, order=index
)
for index, stream_id in enumerate(stream_ids):
ChannelStream.objects.create(channel=channel, stream_id=stream_id, order=index)
return channel
def update(self, instance, validated_data):
streams = validated_data.pop("streams", None)
stream_ids = validated_data.pop('stream_ids', None)
# Update standard fields
# Update all fields from validated_data
for attr, value in validated_data.items():
setattr(instance, attr, value)
instance.save()
if streams is not None:
# Normalize stream IDs
normalized_ids = [
stream.id if hasattr(stream, "id") else stream for stream in streams
]
print(normalized_ids)
# Handle streams if provided
if stream_ids is not None:
# Clear existing associations
instance.channelstream_set.all().delete()
# Get current mapping of stream_id -> ChannelStream
current_links = {
cs.stream_id: cs for cs in instance.channelstream_set.all()
}
# Track existing stream IDs
existing_ids = set(current_links.keys())
new_ids = set(normalized_ids)
# Delete any links not in the new list
to_remove = existing_ids - new_ids
if to_remove:
instance.channelstream_set.filter(stream_id__in=to_remove).delete()
# Update or create with new order
for order, stream_id in enumerate(normalized_ids):
if stream_id in current_links:
cs = current_links[stream_id]
if cs.order != order:
cs.order = order
cs.save(update_fields=["order"])
else:
ChannelStream.objects.create(
channel=instance, stream_id=stream_id, order=order
)
# Create new associations with proper ordering
for index, stream in enumerate(stream_ids):
# Extract the ID from the Stream object
actual_stream_id = stream.id if hasattr(stream, "id") else stream
print(f'Setting stream {actual_stream_id} to index {index}')
ChannelStream.objects.create(
channel=instance,
stream_id=actual_stream_id,
order=index
)
return instance
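Unlike the v0.3.3 side, which deletes and recreates every link, the main-side update diffs the existing ChannelStream rows: removed IDs are deleted, reordered rows get only an order save, and new IDs are created. A hedged sketch of driving it through the serializer (the IDs are illustrative):

channel = Channel.objects.get(id=12)
ser = ChannelSerializer(channel, data={"streams": [9, 4]}, partial=True)
ser.is_valid(raise_exception=True)
ser.save()  # stream 9 now ordered before 4; untouched links keep their rows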
def validate_channel_number(self, value):
"""Ensure channel_number is properly processed as a float"""
if value is None:
return value
try:
# Ensure it's processed as a float
return float(value)
except (ValueError, TypeError):
raise serializers.ValidationError(
"Channel number must be a valid decimal number."
)
def validate_stream_profile(self, value):
"""Handle special case where empty/0 values mean 'use default' (null)"""
if value == "0" or value == 0 or value == "" or value is None:
if value == '0' or value == 0 or value == '' or value is None:
return None
return value # PrimaryKeyRelatedField will handle the conversion to object
def get_auto_created_by_name(self, obj):
"""Get the name of the M3U account that auto-created this channel."""
if obj.auto_created_by:
return obj.auto_created_by.name
return None
class ChannelGroupM3UAccountSerializer(serializers.ModelSerializer):
enabled = serializers.BooleanField()
class Meta:
model = ChannelGroupM3UAccount
fields = ['id', 'channel_group', 'enabled']
# Optionally, if you only need the id of the ChannelGroup, you can customize it like this:
# channel_group = serializers.PrimaryKeyRelatedField(queryset=ChannelGroup.objects.all())
class RecordingSerializer(serializers.ModelSerializer):
class Meta:
model = Recording
fields = "__all__"
read_only_fields = ["task_id"]
fields = '__all__'
read_only_fields = ['task_id']
def validate(self, data):
from core.models import CoreSettings
start_time = data.get("start_time")
end_time = data.get("end_time")
if start_time and timezone.is_naive(start_time):
start_time = timezone.make_aware(start_time, timezone.get_current_timezone())
data["start_time"] = start_time
if end_time and timezone.is_naive(end_time):
end_time = timezone.make_aware(end_time, timezone.get_current_timezone())
data["end_time"] = end_time
# If this is an EPG-based recording (program provided), apply global pre/post offsets
try:
cp = data.get("custom_properties") or {}
is_epg_based = isinstance(cp, dict) and isinstance(cp.get("program"), (dict,))
except Exception:
is_epg_based = False
if is_epg_based and start_time and end_time:
try:
pre_min = int(CoreSettings.get_dvr_pre_offset_minutes())
except Exception:
pre_min = 0
try:
post_min = int(CoreSettings.get_dvr_post_offset_minutes())
except Exception:
post_min = 0
from datetime import timedelta
try:
if pre_min and pre_min > 0:
start_time = start_time - timedelta(minutes=pre_min)
except Exception:
pass
try:
if post_min and post_min > 0:
end_time = end_time + timedelta(minutes=post_min)
except Exception:
pass
# write back adjusted times so scheduling uses them
data["start_time"] = start_time
data["end_time"] = end_time
start_time = data.get('start_time')
end_time = data.get('end_time')
now = timezone.now() # timezone-aware current time
@ -466,61 +259,8 @@ class RecordingSerializer(serializers.ModelSerializer):
if start_time < now:
# Optional: Adjust start_time if it's in the past but end_time is in the future
data["start_time"] = now # or: timezone.now() + timedelta(seconds=1)
if end_time <= data["start_time"]:
data['start_time'] = now # or: timezone.now() + timedelta(seconds=1)
if end_time <= data['start_time']:
raise serializers.ValidationError("End time must be after start time.")
return data
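Net effect for an EPG-based recording: with pre/post offsets of, say, 2 and 5 minutes, a 20:00-21:00 program is scheduled as 19:58-21:05. A small arithmetic sketch (the offset values are illustrative stand-ins for the CoreSettings DVR settings):

from datetime import datetime, timedelta

start = datetime(2025, 1, 1, 20, 0)
end = datetime(2025, 1, 1, 21, 0)
pre_min, post_min = 2, 5  # stand-ins for the DVR pre/post offset minutes

start -= timedelta(minutes=pre_min)
end += timedelta(minutes=post_min)
print(start.time(), end.time())  # 19:58:00 21:05:00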
class RecurringRecordingRuleSerializer(serializers.ModelSerializer):
class Meta:
model = RecurringRecordingRule
fields = "__all__"
read_only_fields = ["created_at", "updated_at"]
def validate_days_of_week(self, value):
if not value:
raise serializers.ValidationError("Select at least one day of the week")
cleaned = []
for entry in value:
try:
iv = int(entry)
except (TypeError, ValueError):
raise serializers.ValidationError("Days of week must be integers 0-6")
if iv < 0 or iv > 6:
raise serializers.ValidationError("Days of week must be between 0 (Monday) and 6 (Sunday)")
cleaned.append(iv)
return sorted(set(cleaned))
def validate(self, attrs):
start = attrs.get("start_time") or getattr(self.instance, "start_time", None)
end = attrs.get("end_time") or getattr(self.instance, "end_time", None)
start_date = attrs.get("start_date") if "start_date" in attrs else getattr(self.instance, "start_date", None)
end_date = attrs.get("end_date") if "end_date" in attrs else getattr(self.instance, "end_date", None)
if start_date is None:
existing_start = getattr(self.instance, "start_date", None)
if existing_start is None:
raise serializers.ValidationError("Start date is required")
if start_date and end_date and end_date < start_date:
raise serializers.ValidationError("End date must be on or after start date")
if end_date is None:
existing_end = getattr(self.instance, "end_date", None)
if existing_end is None:
raise serializers.ValidationError("End date is required")
if start and end and start_date and end_date:
start_dt = datetime.combine(start_date, start)
end_dt = datetime.combine(end_date, end)
if end_dt <= start_dt:
raise serializers.ValidationError("End datetime must be after start datetime")
elif start and end and end == start:
raise serializers.ValidationError("End time must be different from start time")
# Normalize empty strings to None for dates
if attrs.get("end_date") == "":
attrs["end_date"] = None
if attrs.get("start_date") == "":
attrs["start_date"] = None
return super().validate(attrs)
def create(self, validated_data):
return super().create(validated_data)

View file

@ -8,7 +8,7 @@ from .models import Channel, Stream, ChannelProfile, ChannelProfileMembership, R
from apps.m3u.models import M3UAccount
from apps.epg.tasks import parse_programs_for_tvg_id
import logging, requests, time
from .tasks import run_recording, prefetch_recording_artwork
from .tasks import run_recording
from django.utils.timezone import now, is_aware, make_aware
from datetime import timedelta
@ -45,20 +45,6 @@ def set_default_m3u_account(sender, instance, **kwargs):
else:
raise ValueError("No default M3UAccount found.")
@receiver(post_save, sender=Stream)
def generate_custom_stream_hash(sender, instance, created, **kwargs):
"""
Generate a stable stream_hash for custom streams after creation.
Uses the stream's ID to ensure the hash never changes even if name/url is edited.
"""
if instance.is_custom and not instance.stream_hash and created:
import hashlib
# Use stream ID for a stable, unique hash that never changes
unique_string = f"custom_stream_{instance.id}"
instance.stream_hash = hashlib.sha256(unique_string.encode()).hexdigest()
# Use update to avoid triggering signals again
Stream.objects.filter(id=instance.id).update(stream_hash=instance.stream_hash)
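The queryset .update() at the end is deliberate: calling instance.save() here would re-fire post_save and re-enter this handler. In isolation:

# Signal-safe write: queryset updates bypass model signals entirely.
Stream.objects.filter(id=instance.id).update(stream_hash=instance.stream_hash)
# By contrast, instance.save(update_fields=["stream_hash"]) would recurse.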
@receiver(post_save, sender=Channel)
def refresh_epg_programs(sender, instance, created, **kwargs):
"""
@ -76,6 +62,15 @@ def refresh_epg_programs(sender, instance, created, **kwargs):
logger.info(f"New channel {instance.id} ({instance.name}) created with EPG data, refreshing program data")
parse_programs_for_tvg_id.delay(instance.epg_data.id)
@receiver(post_save, sender=Channel)
def add_new_channel_to_groups(sender, instance, created, **kwargs):
if created:
profiles = ChannelProfile.objects.all()
ChannelProfileMembership.objects.bulk_create([
ChannelProfileMembership(channel_profile=profile, channel=instance)
for profile in profiles
])
@receiver(post_save, sender=ChannelProfile)
def create_profile_memberships(sender, instance, created, **kwargs):
if created:
@ -87,9 +82,8 @@ def create_profile_memberships(sender, instance, created, **kwargs):
def schedule_recording_task(instance):
eta = instance.start_time
# Pass recording_id first so task can persist metadata to the correct row
task = run_recording.apply_async(
args=[instance.id, instance.channel_id, str(instance.start_time), str(instance.end_time)],
args=[instance.channel_id, str(instance.start_time), str(instance.end_time)],
eta=eta
)
return task.id
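run_recording is scheduled ahead of time via Celery's eta, so the worker holds the task until the recording should begin. A hedged sketch of the same pattern (the recording variable is assumed; arguments follow the main side above):

from apps.channels.tasks import run_recording

task = run_recording.apply_async(
    args=[recording.id, recording.channel_id,
          str(recording.start_time), str(recording.end_time)],
    eta=recording.start_time,  # Celery delays execution until this moment
)
recording.task_id = task.id  # persisted so the task can be revoked later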
@ -138,11 +132,6 @@ def schedule_task_on_save(sender, instance, created, **kwargs):
instance.save(update_fields=['task_id'])
else:
print("Start time is in the past. Not scheduling.")
# Kick off poster/artwork prefetch to enrich Upcoming cards
try:
prefetch_recording_artwork.apply_async(args=[instance.id], countdown=1)
except Exception as e:
print("Error scheduling artwork prefetch:", e)
except Exception as e:
import traceback
print("Error in post_save signal:", e)

File diff suppressed because it is too large

View file

@ -1,211 +0,0 @@
from django.test import TestCase
from django.contrib.auth import get_user_model
from rest_framework.test import APIClient
from rest_framework import status
from apps.channels.models import Channel, ChannelGroup
User = get_user_model()
class ChannelBulkEditAPITests(TestCase):
def setUp(self):
# Create a test admin user (user_level >= 10) and authenticate
self.user = User.objects.create_user(username="testuser", password="testpass123")
self.user.user_level = 10 # Set admin level
self.user.save()
self.client = APIClient()
self.client.force_authenticate(user=self.user)
self.bulk_edit_url = "/api/channels/channels/edit/bulk/"
# Create test channel group
self.group1 = ChannelGroup.objects.create(name="Test Group 1")
self.group2 = ChannelGroup.objects.create(name="Test Group 2")
# Create test channels
self.channel1 = Channel.objects.create(
channel_number=1.0,
name="Channel 1",
tvg_id="channel1",
channel_group=self.group1
)
self.channel2 = Channel.objects.create(
channel_number=2.0,
name="Channel 2",
tvg_id="channel2",
channel_group=self.group1
)
self.channel3 = Channel.objects.create(
channel_number=3.0,
name="Channel 3",
tvg_id="channel3"
)
def test_bulk_edit_success(self):
"""Test successful bulk update of multiple channels"""
data = [
{"id": self.channel1.id, "name": "Updated Channel 1"},
{"id": self.channel2.id, "name": "Updated Channel 2", "channel_number": 22.0},
]
response = self.client.patch(self.bulk_edit_url, data, format="json")
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertEqual(response.data["message"], "Successfully updated 2 channels")
self.assertEqual(len(response.data["channels"]), 2)
# Verify database changes
self.channel1.refresh_from_db()
self.channel2.refresh_from_db()
self.assertEqual(self.channel1.name, "Updated Channel 1")
self.assertEqual(self.channel2.name, "Updated Channel 2")
self.assertEqual(self.channel2.channel_number, 22.0)
def test_bulk_edit_with_empty_validated_data_first(self):
"""
Test the bug fix: when first channel has empty validated_data.
This was causing: ValueError: Field names must be given to bulk_update()
"""
# Create a channel with data that will be "unchanged" (empty validated_data)
# We'll send the same data it already has
data = [
# First channel: no actual changes (this would create empty validated_data)
{"id": self.channel1.id},
# Second channel: has changes
{"id": self.channel2.id, "name": "Updated Channel 2"},
]
response = self.client.patch(self.bulk_edit_url, data, format="json")
# Should not crash with ValueError
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertEqual(response.data["message"], "Successfully updated 2 channels")
# Verify the channel with changes was updated
self.channel2.refresh_from_db()
self.assertEqual(self.channel2.name, "Updated Channel 2")
def test_bulk_edit_all_empty_updates(self):
"""Test when all channels have empty updates (no actual changes)"""
data = [
{"id": self.channel1.id},
{"id": self.channel2.id},
]
response = self.client.patch(self.bulk_edit_url, data, format="json")
# Should succeed without calling bulk_update
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertEqual(response.data["message"], "Successfully updated 2 channels")
def test_bulk_edit_mixed_fields(self):
"""Test bulk update where different channels update different fields"""
data = [
{"id": self.channel1.id, "name": "New Name 1"},
{"id": self.channel2.id, "channel_number": 99.0},
{"id": self.channel3.id, "tvg_id": "new_tvg_id", "name": "New Name 3"},
]
response = self.client.patch(self.bulk_edit_url, data, format="json")
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertEqual(response.data["message"], "Successfully updated 3 channels")
# Verify all updates
self.channel1.refresh_from_db()
self.channel2.refresh_from_db()
self.channel3.refresh_from_db()
self.assertEqual(self.channel1.name, "New Name 1")
self.assertEqual(self.channel2.channel_number, 99.0)
self.assertEqual(self.channel3.tvg_id, "new_tvg_id")
self.assertEqual(self.channel3.name, "New Name 3")
def test_bulk_edit_with_channel_group(self):
"""Test bulk update with channel_group_id changes"""
data = [
{"id": self.channel1.id, "channel_group_id": self.group2.id},
{"id": self.channel3.id, "channel_group_id": self.group1.id},
]
response = self.client.patch(self.bulk_edit_url, data, format="json")
self.assertEqual(response.status_code, status.HTTP_200_OK)
# Verify group changes
self.channel1.refresh_from_db()
self.channel3.refresh_from_db()
self.assertEqual(self.channel1.channel_group, self.group2)
self.assertEqual(self.channel3.channel_group, self.group1)
def test_bulk_edit_nonexistent_channel(self):
"""Test bulk update with a channel that doesn't exist"""
nonexistent_id = 99999
data = [
{"id": nonexistent_id, "name": "Should Fail"},
{"id": self.channel1.id, "name": "Should Still Update"},
]
response = self.client.patch(self.bulk_edit_url, data, format="json")
# Should return 400 with errors
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
self.assertIn("errors", response.data)
self.assertEqual(len(response.data["errors"]), 1)
self.assertEqual(response.data["errors"][0]["channel_id"], nonexistent_id)
self.assertEqual(response.data["errors"][0]["error"], "Channel not found")
# The valid channel should still be updated
self.assertEqual(response.data["updated_count"], 1)
def test_bulk_edit_validation_error(self):
"""Test bulk update with invalid data (validation error)"""
data = [
{"id": self.channel1.id, "channel_number": "invalid_number"},
]
response = self.client.patch(self.bulk_edit_url, data, format="json")
# Should return 400 with validation errors
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
self.assertIn("errors", response.data)
self.assertEqual(len(response.data["errors"]), 1)
self.assertIn("channel_number", response.data["errors"][0]["errors"])
def test_bulk_edit_empty_channel_updates(self):
"""Test bulk update with empty list"""
data = []
response = self.client.patch(self.bulk_edit_url, data, format="json")
# Empty list is accepted and returns success with 0 updates
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertEqual(response.data["message"], "Successfully updated 0 channels")
def test_bulk_edit_missing_channel_updates(self):
"""Test bulk update without proper format (dict instead of list)"""
data = {"channel_updates": {}}
response = self.client.patch(self.bulk_edit_url, data, format="json")
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
self.assertEqual(response.data["error"], "Expected a list of channel updates")
def test_bulk_edit_preserves_other_fields(self):
"""Test that bulk update only changes specified fields"""
original_channel_number = self.channel1.channel_number
original_tvg_id = self.channel1.tvg_id
data = [
{"id": self.channel1.id, "name": "Only Name Changed"},
]
response = self.client.patch(self.bulk_edit_url, data, format="json")
self.assertEqual(response.status_code, status.HTTP_200_OK)
# Verify only name changed, other fields preserved
self.channel1.refresh_from_db()
self.assertEqual(self.channel1.name, "Only Name Changed")
self.assertEqual(self.channel1.channel_number, original_channel_number)
self.assertEqual(self.channel1.tvg_id, original_tvg_id)

View file

@ -1,40 +0,0 @@
from datetime import datetime, timedelta
from django.test import TestCase
from django.utils import timezone
from apps.channels.models import Channel, RecurringRecordingRule, Recording
from apps.channels.tasks import sync_recurring_rule_impl, purge_recurring_rule_impl
class RecurringRecordingRuleTasksTests(TestCase):
def test_sync_recurring_rule_creates_and_purges_recordings(self):
now = timezone.now()
channel = Channel.objects.create(channel_number=1, name='Test Channel')
start_time = (now + timedelta(minutes=15)).time().replace(second=0, microsecond=0)
end_time = (now + timedelta(minutes=75)).time().replace(second=0, microsecond=0)
rule = RecurringRecordingRule.objects.create(
channel=channel,
days_of_week=[now.weekday()],
start_time=start_time,
end_time=end_time,
)
created = sync_recurring_rule_impl(rule.id, drop_existing=True, horizon_days=1)
self.assertEqual(created, 1)
recording = Recording.objects.filter(custom_properties__rule__id=rule.id).first()
self.assertIsNotNone(recording)
self.assertEqual(recording.channel, channel)
self.assertEqual(recording.custom_properties.get('rule', {}).get('id'), rule.id)
expected_start = timezone.make_aware(
datetime.combine(recording.start_time.date(), start_time),
timezone.get_current_timezone(),
)
self.assertLess(abs((recording.start_time - expected_start).total_seconds()), 60)
removed = purge_recurring_rule_impl(rule.id)
self.assertEqual(removed, 1)
self.assertFalse(Recording.objects.filter(custom_properties__rule__id=rule.id).exists())

View file

@ -2,66 +2,47 @@ import logging, os
from rest_framework import viewsets, status
from rest_framework.response import Response
from rest_framework.views import APIView
from rest_framework.permissions import IsAuthenticated
from rest_framework.decorators import action
from drf_yasg.utils import swagger_auto_schema
from drf_yasg import openapi
from django.utils import timezone
from datetime import timedelta
from .models import EPGSource, ProgramData, EPGData # Added ProgramData
from .serializers import (
ProgramDataSerializer,
EPGSourceSerializer,
EPGDataSerializer,
) # Updated serializer
from .serializers import ProgramDataSerializer, EPGSourceSerializer, EPGDataSerializer # Updated serializer
from .tasks import refresh_epg_data
from apps.accounts.permissions import (
Authenticated,
permission_classes_by_action,
permission_classes_by_method,
)
logger = logging.getLogger(__name__)
# ─────────────────────────────
# 1) EPG Source API (CRUD)
# ─────────────────────────────
class EPGSourceViewSet(viewsets.ModelViewSet):
"""
API endpoint that allows EPG sources to be viewed or edited.
"""
"""Handles CRUD operations for EPG sources"""
queryset = EPGSource.objects.all()
serializer_class = EPGSourceSerializer
def get_permissions(self):
try:
return [perm() for perm in permission_classes_by_action[self.action]]
except KeyError:
return [Authenticated()]
permission_classes = [IsAuthenticated]
def list(self, request, *args, **kwargs):
logger.debug("Listing all EPG sources.")
return super().list(request, *args, **kwargs)
@action(detail=False, methods=["post"])
@action(detail=False, methods=['post'])
def upload(self, request):
if "file" not in request.FILES:
return Response(
{"error": "No file uploaded"}, status=status.HTTP_400_BAD_REQUEST
)
if 'file' not in request.FILES:
return Response({'error': 'No file uploaded'}, status=status.HTTP_400_BAD_REQUEST)
file = request.FILES["file"]
file = request.FILES['file']
file_name = file.name
file_path = os.path.join("/data/uploads/epgs", file_name)
file_path = os.path.join('/data/uploads/epgs', file_name)
os.makedirs(os.path.dirname(file_path), exist_ok=True)
with open(file_path, "wb+") as destination:
with open(file_path, 'wb+') as destination:
for chunk in file.chunks():
destination.write(chunk)
new_obj_data = request.data.copy()
new_obj_data["file_path"] = file_path
new_obj_data['file_path'] = file_path
serializer = self.get_serializer(data=new_obj_data)
serializer.is_valid(raise_exception=True)
@ -69,241 +50,72 @@ class EPGSourceViewSet(viewsets.ModelViewSet):
return Response(serializer.data, status=status.HTTP_201_CREATED)
def partial_update(self, request, *args, **kwargs):
"""Handle partial updates with special logic for is_active field"""
instance = self.get_object()
# Check if we're toggling is_active
if (
"is_active" in request.data
and instance.is_active != request.data["is_active"]
):
# Set appropriate status based on new is_active value
if request.data["is_active"]:
request.data["status"] = "idle"
else:
request.data["status"] = "disabled"
# Continue with regular partial update
return super().partial_update(request, *args, **kwargs)
# ─────────────────────────────
# 2) Program API (CRUD)
# ─────────────────────────────
class ProgramViewSet(viewsets.ModelViewSet):
"""Handles CRUD operations for EPG programs"""
queryset = ProgramData.objects.all()
serializer_class = ProgramDataSerializer
def get_permissions(self):
try:
return [perm() for perm in permission_classes_by_action[self.action]]
except KeyError:
return [Authenticated()]
permission_classes = [IsAuthenticated]
def list(self, request, *args, **kwargs):
logger.debug("Listing all EPG programs.")
return super().list(request, *args, **kwargs)
# ─────────────────────────────
# 3) EPG Grid View
# ─────────────────────────────
class EPGGridAPIView(APIView):
"""Returns all programs airing in the next 24 hours including currently running ones and recent ones"""
def get_permissions(self):
try:
return [
perm() for perm in permission_classes_by_method[self.request.method]
]
except KeyError:
return [Authenticated()]
@swagger_auto_schema(
operation_description="Retrieve programs from the previous hour, currently running and upcoming for the next 24 hours",
responses={200: ProgramDataSerializer(many=True)},
responses={200: ProgramDataSerializer(many=True)}
)
def get(self, request, format=None):
# Use current time instead of midnight
now = timezone.now()
one_hour_ago = now - timedelta(hours=1)
twenty_four_hours_later = now + timedelta(hours=24)
logger.debug(
f"EPGGridAPIView: Querying programs between {one_hour_ago} and {twenty_four_hours_later}."
)
logger.debug(f"EPGGridAPIView: Querying programs between {one_hour_ago} and {twenty_four_hours_later}.")
# Use select_related to prefetch EPGData and include programs from the last hour
programs = ProgramData.objects.select_related("epg").filter(
programs = ProgramData.objects.select_related('epg').filter(
# Programs that end after one hour ago (includes recently ended programs)
end_time__gt=one_hour_ago,
# AND start before the end time window
start_time__lt=twenty_four_hours_later,
start_time__lt=twenty_four_hours_later
)
count = programs.count()
logger.debug(
f"EPGGridAPIView: Found {count} program(s), including recently ended, currently running, and upcoming shows."
)
logger.debug(f"EPGGridAPIView: Found {count} program(s), including recently ended, currently running, and upcoming shows.")
# Generate dummy programs for channels that have no EPG data OR dummy EPG sources
# Generate dummy programs for channels that have no EPG data
from apps.channels.models import Channel
from apps.epg.models import EPGSource
from django.db.models import Q
# Get channels with no EPG data at all (standard dummy)
# Get channels with no EPG data
channels_without_epg = Channel.objects.filter(Q(epg_data__isnull=True))
channels_count = channels_without_epg.count()
# Get channels with custom dummy EPG sources (generate on-demand with patterns)
channels_with_custom_dummy = Channel.objects.filter(
epg_data__epg_source__source_type='dummy'
).distinct()
# Log what we found
without_count = channels_without_epg.count()
custom_count = channels_with_custom_dummy.count()
if without_count > 0:
# Log more detailed information about channels missing EPG data
if channels_count > 0:
channel_names = [f"{ch.name} (ID: {ch.id})" for ch in channels_without_epg]
logger.debug(
f"EPGGridAPIView: Channels needing standard dummy EPG: {', '.join(channel_names)}"
)
logger.warning(f"EPGGridAPIView: Missing EPG data for these channels: {', '.join(channel_names)}")
if custom_count > 0:
channel_names = [f"{ch.name} (ID: {ch.id})" for ch in channels_with_custom_dummy]
logger.debug(
f"EPGGridAPIView: Channels needing custom dummy EPG: {', '.join(channel_names)}"
)
logger.debug(
f"EPGGridAPIView: Found {without_count} channels needing standard dummy, {custom_count} needing custom dummy EPG."
)
logger.debug(f"EPGGridAPIView: Found {channels_count} channels with no EPG data.")
# Serialize the regular programs
serialized_programs = ProgramDataSerializer(programs, many=True).data
# Humorous program descriptions based on time of day - same as in output/views.py
time_descriptions = {
(0, 4): [
"Late Night with {channel} - Where insomniacs unite!",
"The 'Why Am I Still Awake?' Show on {channel}",
"Counting Sheep - A {channel} production for the sleepless",
],
(4, 8): [
"Dawn Patrol - Rise and shine with {channel}!",
"Early Bird Special - Coffee not included",
"Morning Zombies - Before coffee viewing on {channel}",
],
(8, 12): [
"Mid-Morning Meetings - Pretend you're paying attention while watching {channel}",
"The 'I Should Be Working' Hour on {channel}",
"Productivity Killer - {channel}'s daytime programming",
],
(12, 16): [
"Lunchtime Laziness with {channel}",
"The Afternoon Slump - Brought to you by {channel}",
"Post-Lunch Food Coma Theater on {channel}",
],
(16, 20): [
"Rush Hour - {channel}'s alternative to traffic",
"The 'What's For Dinner?' Debate on {channel}",
"Evening Escapism - {channel}'s remedy for reality",
],
(20, 24): [
"Prime Time Placeholder - {channel}'s finest not-programming",
"The 'Netflix Was Too Complicated' Show on {channel}",
"Family Argument Avoider - Courtesy of {channel}",
],
}
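# Worked example of the selection logic below (an illustration, not new
# behavior): a 21:00 slot falls in the (20, 24) bucket, and with day = 0
# the index (21 + 0) % 3 == 0 picks the first entry,
# "Prime Time Placeholder - {channel}'s finest not-programming".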
# Generate and append dummy programs
dummy_programs = []
# Import the function from output.views
from apps.output.views import generate_dummy_programs as gen_dummy_progs
# Handle channels with CUSTOM dummy EPG sources (with patterns)
for channel in channels_with_custom_dummy:
# For dummy EPGs, ALWAYS use channel UUID to ensure unique programs per channel
# This prevents multiple channels assigned to the same dummy EPG from showing identical data
# Each channel gets its own unique program data even if they share the same EPG source
dummy_tvg_id = str(channel.uuid)
try:
# Get the custom dummy EPG source
epg_source = channel.epg_data.epg_source if channel.epg_data else None
logger.debug(f"Generating custom dummy programs for channel: {channel.name} (ID: {channel.id})")
# Determine which name to parse based on custom properties
name_to_parse = channel.name
if epg_source and epg_source.custom_properties:
custom_props = epg_source.custom_properties
name_source = custom_props.get('name_source')
if name_source == 'stream':
# Get the stream index (1-based from user, convert to 0-based)
stream_index = custom_props.get('stream_index', 1) - 1
# Get streams ordered by channelstream order
channel_streams = channel.streams.all().order_by('channelstream__order')
if channel_streams.exists() and 0 <= stream_index < channel_streams.count():
stream = list(channel_streams)[stream_index]
name_to_parse = stream.name
logger.debug(f"Using stream name for parsing: {name_to_parse} (stream index: {stream_index})")
else:
logger.warning(f"Stream index {stream_index} not found for channel {channel.name}, falling back to channel name")
elif name_source == 'channel':
logger.debug(f"Using channel name for parsing: {name_to_parse}")
# Generate programs using custom patterns from the dummy EPG source
# Use the same tvg_id that will be set in the program data
generated = gen_dummy_progs(
channel_id=dummy_tvg_id,
channel_name=name_to_parse,
num_days=1,
program_length_hours=4,
epg_source=epg_source
)
# Custom dummy should always return data (either from patterns or fallback)
if generated:
logger.debug(f"Generated {len(generated)} custom dummy programs for {channel.name}")
# Convert generated programs to API format
for program in generated:
dummy_program = {
"id": f"dummy-custom-{channel.id}-{program['start_time'].hour}",
"epg": {"tvg_id": dummy_tvg_id, "name": channel.name},
"start_time": program['start_time'].isoformat(),
"end_time": program['end_time'].isoformat(),
"title": program['title'],
"description": program['description'],
"tvg_id": dummy_tvg_id,
"sub_title": None,
"custom_properties": None,
}
dummy_programs.append(dummy_program)
else:
logger.warning(f"No programs generated for custom dummy EPG channel: {channel.name}")
except Exception as e:
logger.error(
f"Error creating custom dummy programs for channel {channel.name} (ID: {channel.id}): {str(e)}"
)
# Handle channels with NO EPG data (standard dummy with humorous descriptions)
for channel in channels_without_epg:
# For channels with no EPG, use UUID to ensure uniqueness (matches frontend logic)
# The frontend uses: tvgRecord?.tvg_id ?? channel.uuid
# Since there's no EPG data, it will fall back to UUID
# Use the channel UUID as tvg_id for dummy programs to match in the guide
dummy_tvg_id = str(channel.uuid)
try:
logger.debug(f"Generating standard dummy programs for channel: {channel.name} (ID: {channel.id})")
# Create programs every 4 hours for the next 24 hours with humorous descriptions
for hour_offset in range(0, 24, 4):
# Use timedelta for time arithmetic instead of replace() to avoid hour overflow
start_time = now + timedelta(hours=hour_offset)
@ -311,51 +123,35 @@ class EPGGridAPIView(APIView):
start_time = start_time.replace(minute=0, second=0, microsecond=0)
end_time = start_time + timedelta(hours=4)
# Get the hour for selecting a description
hour = start_time.hour
day = 0 # Use 0 as we're only doing 1 day
# Find the appropriate time slot for description
for time_range, descriptions in time_descriptions.items():
start_range, end_range = time_range
if start_range <= hour < end_range:
# Pick a description using the sum of the hour and day as seed
# This makes it somewhat random but consistent for the same timeslot
description = descriptions[
(hour + day) % len(descriptions)
].format(channel=channel.name)
break
else:
# Fallback description if somehow no range matches
description = f"Placeholder program for {channel.name} - EPG data went on vacation"
# Create a dummy program in the same format as regular programs
dummy_program = {
"id": f"dummy-standard-{channel.id}-{hour_offset}",
"epg": {"tvg_id": dummy_tvg_id, "name": channel.name},
"start_time": start_time.isoformat(),
"end_time": end_time.isoformat(),
"title": f"{channel.name}",
"description": description,
"tvg_id": dummy_tvg_id,
"sub_title": None,
"custom_properties": None,
'id': f"dummy-{channel.id}-{hour_offset}", # Create a unique ID
'epg': {
'tvg_id': dummy_tvg_id,
'name': channel.name
},
'start_time': start_time.isoformat(),
'end_time': end_time.isoformat(),
'title': f"{channel.name}",
'description': f"Placeholder program for {channel.name}",
'tvg_id': dummy_tvg_id,
'sub_title': None,
'custom_properties': None
}
dummy_programs.append(dummy_program)
# Also update the channel to use this dummy tvg_id
channel.tvg_id = dummy_tvg_id
channel.save(update_fields=['tvg_id'])
except Exception as e:
logger.error(
f"Error creating standard dummy programs for channel {channel.name} (ID: {channel.id}): {str(e)}"
)
logger.error(f"Error creating dummy programs for channel {channel.name} (ID: {channel.id}): {str(e)}")
# Combine regular and dummy programs
all_programs = list(serialized_programs) + dummy_programs
logger.debug(
f"EPGGridAPIView: Returning {len(all_programs)} total programs (including {len(dummy_programs)} dummy programs)."
)
return Response({"data": all_programs}, status=status.HTTP_200_OK)
logger.debug(f"EPGGridAPIView: Returning {len(all_programs)} total programs (including {len(dummy_programs)} dummy programs).")
return Response({'data': all_programs}, status=status.HTTP_200_OK)
# ─────────────────────────────
# 4) EPG Import View
@ -363,41 +159,15 @@ class EPGGridAPIView(APIView):
class EPGImportAPIView(APIView):
"""Triggers an EPG data refresh"""
def get_permissions(self):
try:
return [
perm() for perm in permission_classes_by_method[self.request.method]
]
except KeyError:
return [Authenticated()]
@swagger_auto_schema(
operation_description="Triggers an EPG data import",
responses={202: "EPG data import initiated"},
responses={202: "EPG data import initiated"}
)
def post(self, request, format=None):
logger.info("EPGImportAPIView: Received request to import EPG data.")
epg_id = request.data.get("id", None)
# Check if this is a dummy EPG source
try:
from .models import EPGSource
epg_source = EPGSource.objects.get(id=epg_id)
if epg_source.source_type == 'dummy':
logger.info(f"EPGImportAPIView: Skipping refresh for dummy EPG source {epg_id}")
return Response(
{"success": False, "message": "Dummy EPG sources do not require refreshing."},
status=status.HTTP_400_BAD_REQUEST,
)
except EPGSource.DoesNotExist:
pass # Let the task handle the missing source
refresh_epg_data.delay(epg_id) # Trigger Celery task
logger.info("EPGImportAPIView: Task dispatched to refresh EPG data.")
return Response(
{"success": True, "message": "EPG data import initiated."},
status=status.HTTP_202_ACCEPTED,
)
# ─────────────────────────────
@ -407,13 +177,6 @@ class EPGDataViewSet(viewsets.ReadOnlyModelViewSet):
"""
API endpoint that allows EPGData objects to be viewed.
"""
queryset = EPGData.objects.all()
serializer_class = EPGDataSerializer
def get_permissions(self):
try:
return [perm() for perm in permission_classes_by_action[self.action]]
except KeyError:
return [Authenticated()]
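The get_permissions() pattern above recurs across these views: a per-method or per-action mapping is consulted, with Authenticated as the fallback. The real mappings live in apps.accounts.permissions and are not part of this diff, so the sketch below only illustrates the shape the views assume:

# Hypothetical sketch of apps.accounts.permissions (illustrative only;
# the actual module is not shown in this diff).
from rest_framework.permissions import IsAuthenticated

class Authenticated(IsAuthenticated):
    """Fallback permission when no per-method/per-action entry exists."""

permission_classes_by_method = {
    "GET": [Authenticated],
    "POST": [Authenticated],  # a deployment could tighten this per method
}

permission_classes_by_action = {
    "list": [Authenticated],
    "retrieve": [Authenticated],
}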

View file

@ -1,23 +0,0 @@
# Generated by Django
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('epg', '0006_epgsource_refresh_interval_epgsource_refresh_task'),
]
operations = [
migrations.AddField(
model_name='epgsource',
name='status',
field=models.CharField(choices=[('idle', 'Idle'), ('fetching', 'Fetching'), ('parsing', 'Parsing'), ('error', 'Error'), ('success', 'Success')], default='idle', max_length=20),
),
migrations.AddField(
model_name='epgsource',
name='last_error',
field=models.TextField(blank=True, null=True),
),
]

View file

@ -1,14 +0,0 @@
# Generated by Django 5.1.6 on 2025-05-03 21:47
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('epg', '0007_epgsource_status_epgsource_last_error'),
('epg', '0009_alter_epgsource_created_at_and_more'),
]
operations = [
]

View file

@ -1,42 +0,0 @@
# Generated by Django 5.1.6 on 2025-05-04 21:43
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('epg', '0010_merge_20250503_2147'),
]
operations = [
# Change updated_at field
migrations.AlterField(
model_name='epgsource',
name='updated_at',
field=models.DateTimeField(blank=True, help_text='Time when this source was last successfully refreshed', null=True),
),
# Add new last_message field
migrations.AddField(
model_name='epgsource',
name='last_message',
field=models.TextField(blank=True, help_text='Last status message, including success results or error information', null=True),
),
# Copy data from last_error to last_message
migrations.RunPython(
code=lambda apps, schema_editor: apps.get_model('epg', 'EPGSource').objects.all().update(
last_message=models.F('last_error')
),
reverse_code=lambda apps, schema_editor: apps.get_model('epg', 'EPGSource').objects.all().update(
last_error=models.F('last_message')
),
),
# Remove the old field
migrations.RemoveField(
model_name='epgsource',
name='last_error',
),
]

View file

@ -1,18 +0,0 @@
# Generated by Django 5.1.6 on 2025-05-15 01:05
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('epg', '0011_update_epgsource_fields'),
]
operations = [
migrations.AlterField(
model_name='epgsource',
name='status',
field=models.CharField(choices=[('idle', 'Idle'), ('fetching', 'Fetching'), ('parsing', 'Parsing'), ('error', 'Error'), ('success', 'Success'), ('disabled', 'Disabled')], default='idle', max_length=20),
),
]

View file

@ -1,18 +0,0 @@
# Generated by Django 5.1.6 on 2025-05-21 19:58
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('epg', '0012_alter_epgsource_status'),
]
operations = [
migrations.AlterField(
model_name='epgsource',
name='refresh_interval',
field=models.IntegerField(default=0),
),
]

View file

@ -1,18 +0,0 @@
# Generated by Django 5.1.6 on 2025-05-26 15:48
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('epg', '0013_alter_epgsource_refresh_interval'),
]
operations = [
migrations.AddField(
model_name='epgsource',
name='extracted_file_path',
field=models.CharField(blank=True, help_text='Path to extracted XML file after decompression', max_length=1024, null=True),
),
]

View file

@ -1,18 +0,0 @@
# Generated by Django 5.2.4 on 2025-09-02 14:30
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('epg', '0014_epgsource_extracted_file_path'),
]
operations = [
migrations.AlterField(
model_name='programdata',
name='custom_properties',
field=models.JSONField(blank=True, default=dict, null=True),
),
]

View file

@ -1,18 +0,0 @@
# Generated by Django 5.2.4 on 2025-09-16 22:01
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('epg', '0015_alter_programdata_custom_properties'),
]
operations = [
migrations.AddField(
model_name='epgdata',
name='icon_url',
field=models.URLField(blank=True, max_length=500, null=True),
),
]

View file

@ -1,18 +0,0 @@
# Generated by Django 5.2.4 on 2025-09-24 21:07
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('epg', '0016_epgdata_icon_url'),
]
operations = [
migrations.AlterField(
model_name='epgsource',
name='url',
field=models.URLField(blank=True, max_length=1000, null=True),
),
]

View file

@ -1,23 +0,0 @@
# Generated by Django 5.2.4 on 2025-10-17 17:02
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('epg', '0017_alter_epgsource_url'),
]
operations = [
migrations.AddField(
model_name='epgsource',
name='custom_properties',
field=models.JSONField(blank=True, default=dict, help_text='Custom properties for dummy EPG configuration (regex patterns, timezone, duration, etc.)', null=True),
),
migrations.AlterField(
model_name='epgsource',
name='source_type',
field=models.CharField(choices=[('xmltv', 'XMLTV URL'), ('schedules_direct', 'Schedules Direct API'), ('dummy', 'Custom Dummy EPG')], max_length=20),
),
]
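
For context, a dummy EPG source's custom_properties might carry entries like the following; the keys are drawn from code elsewhere in this diff (the grid view's name_source/stream_index handling and migration 0020's template fields), not from an authoritative schema:

# Illustrative custom_properties for a dummy EPG source (keys as referenced
# elsewhere in this diff; the values are made up for the example).
dummy_props = {
    "name_source": "stream",                       # or "channel"
    "stream_index": 1,                             # 1-based, per EPGGridAPIView
    "title_template": "{channel} at {starttime}",
    "description_template": "On {channel} from {starttime24}",
}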

View file

@ -1,18 +0,0 @@
# Generated by Django 5.2.4 on 2025-10-22 21:59
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('epg', '0018_epgsource_custom_properties_and_more'),
]
operations = [
migrations.AlterField(
model_name='programdata',
name='sub_title',
field=models.TextField(blank=True, null=True),
),
]

View file

@ -1,119 +0,0 @@
# Generated migration to replace {time} placeholders with {starttime}
import re
from django.db import migrations
def migrate_time_placeholders(apps, schema_editor):
"""
Replace {time} with {starttime} and {time24} with {starttime24}
in all dummy EPG source custom_properties templates.
"""
EPGSource = apps.get_model('epg', 'EPGSource')
# Fields that contain templates with placeholders
template_fields = [
'title_template',
'description_template',
'upcoming_title_template',
'upcoming_description_template',
'ended_title_template',
'ended_description_template',
'channel_logo_url',
'program_poster_url',
]
# Get all dummy EPG sources
dummy_sources = EPGSource.objects.filter(source_type='dummy')
updated_count = 0
for source in dummy_sources:
if not source.custom_properties:
continue
modified = False
custom_props = source.custom_properties.copy()
for field in template_fields:
if field in custom_props and custom_props[field]:
original_value = custom_props[field]
# Replace {time24} first (before {time}) to avoid double replacement
# e.g., {time24} shouldn't become {starttime24} via {time} -> {starttime}
new_value = original_value
new_value = re.sub(r'\{time24\}', '{starttime24}', new_value)
new_value = re.sub(r'\{time\}', '{starttime}', new_value)
if new_value != original_value:
custom_props[field] = new_value
modified = True
if modified:
source.custom_properties = custom_props
source.save(update_fields=['custom_properties'])
updated_count += 1
if updated_count > 0:
print(f"Migration complete: Updated {updated_count} dummy EPG source(s) with new placeholder names.")
else:
print("No dummy EPG sources needed placeholder updates.")
def reverse_migration(apps, schema_editor):
"""
Reverse the migration by replacing {starttime} back to {time}.
"""
EPGSource = apps.get_model('epg', 'EPGSource')
template_fields = [
'title_template',
'description_template',
'upcoming_title_template',
'upcoming_description_template',
'ended_title_template',
'ended_description_template',
'channel_logo_url',
'program_poster_url',
]
dummy_sources = EPGSource.objects.filter(source_type='dummy')
updated_count = 0
for source in dummy_sources:
if not source.custom_properties:
continue
modified = False
custom_props = source.custom_properties.copy()
for field in template_fields:
if field in custom_props and custom_props[field]:
original_value = custom_props[field]
# Reverse the replacements
new_value = original_value
new_value = re.sub(r'\{starttime24\}', '{time24}', new_value)
new_value = re.sub(r'\{starttime\}', '{time}', new_value)
if new_value != original_value:
custom_props[field] = new_value
modified = True
if modified:
source.custom_properties = custom_props
source.save(update_fields=['custom_properties'])
updated_count += 1
if updated_count > 0:
print(f"Reverse migration complete: Reverted {updated_count} dummy EPG source(s) to old placeholder names.")
class Migration(migrations.Migration):
dependencies = [
('epg', '0019_alter_programdata_sub_title'),
]
operations = [
migrations.RunPython(migrate_time_placeholders, reverse_migration),
]
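
A quick worked example of the substitution pairs above:

import re

# Mirrors the replacement order used in migrate_time_placeholders().
template = "Starts at {time} ({time24})"
step1 = re.sub(r"\{time24\}", "{starttime24}", template)
step2 = re.sub(r"\{time\}", "{starttime}", step1)
assert step2 == "Starts at {starttime} ({starttime24})"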

View file

@ -1,18 +0,0 @@
# Generated by Django 5.2.4 on 2025-12-05 15:24
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('epg', '0020_migrate_time_to_starttime_placeholders'),
]
operations = [
migrations.AddField(
model_name='epgsource',
name='priority',
field=models.PositiveIntegerField(default=0, help_text='Priority for EPG matching (higher numbers = higher priority). Used when multiple EPG sources have matching entries for a channel.'),
),
]

View file

@ -1,144 +1,39 @@
from django.db import models
from django.utils import timezone
from django_celery_beat.models import PeriodicTask
from django.conf import settings
import os
class EPGSource(models.Model):
SOURCE_TYPE_CHOICES = [
('xmltv', 'XMLTV URL'),
('schedules_direct', 'Schedules Direct API'),
('dummy', 'Custom Dummy EPG'),
]
STATUS_IDLE = 'idle'
STATUS_FETCHING = 'fetching'
STATUS_PARSING = 'parsing'
STATUS_ERROR = 'error'
STATUS_SUCCESS = 'success'
STATUS_DISABLED = 'disabled'
STATUS_CHOICES = [
(STATUS_IDLE, 'Idle'),
(STATUS_FETCHING, 'Fetching'),
(STATUS_PARSING, 'Parsing'),
(STATUS_ERROR, 'Error'),
(STATUS_SUCCESS, 'Success'),
(STATUS_DISABLED, 'Disabled'),
]
name = models.CharField(max_length=255, unique=True)
source_type = models.CharField(max_length=20, choices=SOURCE_TYPE_CHOICES)
url = models.URLField(max_length=1000, blank=True, null=True) # For XMLTV
api_key = models.CharField(max_length=255, blank=True, null=True) # For Schedules Direct
is_active = models.BooleanField(default=True)
file_path = models.CharField(max_length=1024, blank=True, null=True)
extracted_file_path = models.CharField(max_length=1024, blank=True, null=True,
help_text="Path to extracted XML file after decompression")
refresh_interval = models.IntegerField(default=0)
refresh_task = models.ForeignKey(
PeriodicTask, on_delete=models.SET_NULL, null=True, blank=True
)
custom_properties = models.JSONField(
default=dict,
blank=True,
null=True,
help_text="Custom properties for dummy EPG configuration (regex patterns, timezone, duration, etc.)"
)
priority = models.PositiveIntegerField(
default=0,
help_text="Priority for EPG matching (higher numbers = higher priority). Used when multiple EPG sources have matching entries for a channel."
)
status = models.CharField(
max_length=20,
choices=STATUS_CHOICES,
default=STATUS_IDLE
)
last_message = models.TextField(
null=True,
blank=True,
help_text="Last status message, including success results or error information"
)
created_at = models.DateTimeField(
auto_now_add=True,
help_text="Time when this source was created"
)
updated_at = models.DateTimeField(
null=True, blank=True,
help_text="Time when this source was last successfully refreshed"
)
def __str__(self):
return self.name
def get_cache_file(self):
import mimetypes
# Use a temporary extension for initial download
# The actual extension will be determined after content inspection
file_ext = ".tmp"
# If file_path is already set and contains an extension, use that
# This handles cases where we've already detected the proper type
if self.file_path and os.path.exists(self.file_path):
_, existing_ext = os.path.splitext(self.file_path)
if existing_ext:
file_ext = existing_ext
else:
# Try to detect the MIME type and map to extension
mime_type, _ = mimetypes.guess_type(self.file_path)
if mime_type:
if mime_type == 'application/gzip' or mime_type == 'application/x-gzip':
file_ext = '.gz'
elif mime_type == 'application/zip':
file_ext = '.zip'
elif mime_type == 'application/xml' or mime_type == 'text/xml':
file_ext = '.xml'
# For files without mime type detection, try peeking at content
else:
try:
with open(self.file_path, 'rb') as f:
header = f.read(4)
# Check for gzip magic number (1f 8b)
if header[:2] == b'\x1f\x8b':
file_ext = '.gz'
# Check for zip magic number (PK..)
elif header[:2] == b'PK':
file_ext = '.zip'
# Check for XML
elif header[:5] == b'<?xml' or header[:5] == b'<tv>':
file_ext = '.xml'
except Exception as e:
# If we can't read the file, just keep the default extension
pass
filename = f"{self.id}{file_ext}"
# Build full path in MEDIA_ROOT/cached_epg
cache_dir = os.path.join(settings.MEDIA_ROOT, "cached_epg")
# Create directory if it doesn't exist
os.makedirs(cache_dir, exist_ok=True)
cache = os.path.join(cache_dir, filename)
return cache
def save(self, *args, **kwargs):
# Prevent auto_now behavior by handling updated_at manually
if 'update_fields' in kwargs and 'updated_at' not in kwargs['update_fields']:
# Don't modify updated_at for regular updates
kwargs.setdefault('update_fields', [])
if 'updated_at' in kwargs['update_fields']:
kwargs['update_fields'].remove('updated_at')
super().save(*args, **kwargs)
class EPGData(models.Model):
# Removed the Channel foreign key. We now just store the original tvg_id
# and a name (which might simply be the tvg_id if no real channel exists).
tvg_id = models.CharField(max_length=255, null=True, blank=True, db_index=True)
name = models.CharField(max_length=255)
icon_url = models.URLField(max_length=500, null=True, blank=True)
epg_source = models.ForeignKey(
EPGSource,
on_delete=models.CASCADE,
@ -159,10 +54,10 @@ class ProgramData(models.Model):
start_time = models.DateTimeField()
end_time = models.DateTimeField()
title = models.CharField(max_length=255)
sub_title = models.TextField(blank=True, null=True)
description = models.TextField(blank=True, null=True)
tvg_id = models.CharField(max_length=255, null=True, blank=True)
custom_properties = models.JSONField(default=dict, blank=True, null=True)
def __str__(self):
return f"{self.title} ({self.start_time} - {self.end_time})"
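
The content sniffing in get_cache_file() reduces to a few magic-number checks; here it is as a standalone helper for reference. Note one subtlety: the method compares header[:5] against the four-byte b'<tv>', which can only match a four-byte file, so the sketch checks header[:4] instead:

def sniff_extension(path: str) -> str:
    """Guess an extension from leading bytes, as EPGSource.get_cache_file() does."""
    with open(path, "rb") as f:
        header = f.read(5)
    if header[:2] == b"\x1f\x8b":  # gzip magic number
        return ".gz"
    if header[:2] == b"PK":  # zip magic number
        return ".zip"
    if header[:5] == b"<?xml" or header[:4] == b"<tv>":
        return ".xml"
    return ".tmp"  # fall back to the temporary extension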

View file

@ -1,41 +1,17 @@
from core.utils import validate_flexible_url
from rest_framework import serializers
from .models import EPGSource, EPGData, ProgramData
from apps.channels.models import Channel
class EPGSourceSerializer(serializers.ModelSerializer):
epg_data_count = serializers.SerializerMethodField()
read_only_fields = ['created_at', 'updated_at']
url = serializers.CharField(
required=False,
allow_blank=True,
allow_null=True,
validators=[validate_flexible_url]
)
class Meta:
model = EPGSource
fields = [
'id',
'name',
'source_type',
'url',
'api_key',
'is_active',
'file_path',
'refresh_interval',
'priority',
'status',
'last_message',
'created_at',
'updated_at',
'custom_properties',
'epg_data_count'
]
def get_epg_data_count(self, obj):
"""Return the count of EPG data entries instead of all IDs to prevent large payloads"""
return obj.epgs.count()
class ProgramDataSerializer(serializers.ModelSerializer):
class Meta:
@ -55,6 +31,5 @@ class EPGDataSerializer(serializers.ModelSerializer):
'id',
'tvg_id',
'name',
'icon_url',
'epg_source',
]
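
Replacing epg_data_ids with epg_data_count bounds the response size: a source with tens of thousands of EPGData rows previously serialized every id. Roughly, a serialized source now looks like this (illustrative values):

# Illustrative EPGSourceSerializer output for a large source.
{
    "id": 3,
    "name": "Provider XMLTV",
    "source_type": "xmltv",
    "priority": 0,
    "status": "success",
    "epg_data_count": 40000,  # previously: a 40,000-element epg_data_ids list
}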

View file

@ -1,88 +1,21 @@
from django.db.models.signals import post_save, post_delete, pre_save
from django.dispatch import receiver
from .models import EPGSource, EPGData
from .tasks import refresh_epg_data, delete_epg_refresh_task_by_id
from django_celery_beat.models import PeriodicTask, IntervalSchedule
from core.utils import is_protected_path, send_websocket_update
import json
import logging
import os
logger = logging.getLogger(__name__)
@receiver(post_save, sender=EPGSource)
def trigger_refresh_on_new_epg_source(sender, instance, created, **kwargs):
# Trigger refresh only if the source is newly created, active, and not a dummy EPG
if created and instance.is_active and instance.source_type != 'dummy':
refresh_epg_data.delay(instance.id)
@receiver(post_save, sender=EPGSource)
def create_dummy_epg_data(sender, instance, created, **kwargs):
"""
Automatically create EPGData for dummy EPG sources when they are created.
This allows channels to be assigned to dummy EPGs immediately without
requiring a refresh first.
"""
if instance.source_type == 'dummy':
# Ensure dummy EPGs always have idle status and no status message
if instance.status != EPGSource.STATUS_IDLE or instance.last_message:
instance.status = EPGSource.STATUS_IDLE
instance.last_message = None
instance.save(update_fields=['status', 'last_message'])
# Create a URL-friendly tvg_id from the dummy EPG name
# Replace spaces and special characters with underscores
friendly_tvg_id = instance.name.replace(' ', '_').replace('-', '_')
# Remove any characters that aren't alphanumeric or underscores
friendly_tvg_id = ''.join(c for c in friendly_tvg_id if c.isalnum() or c == '_')
# Convert to lowercase for consistency
friendly_tvg_id = friendly_tvg_id.lower()
# Prefix with 'dummy_' to make it clear this is a dummy EPG
friendly_tvg_id = f"dummy_{friendly_tvg_id}"
# Create or update the EPGData record
epg_data, data_created = EPGData.objects.get_or_create(
tvg_id=friendly_tvg_id,
epg_source=instance,
defaults={
'name': instance.name,
'icon_url': None
}
)
# Update name if it changed and record already existed
if not data_created and epg_data.name != instance.name:
epg_data.name = instance.name
epg_data.save(update_fields=['name'])
if data_created:
logger.info(f"Auto-created EPGData for dummy EPG source: {instance.name} (ID: {instance.id})")
# Send websocket update to notify frontend that EPG data has been created
# This allows the channel form to immediately show the new dummy EPG without refreshing
send_websocket_update('updates', 'update', {
'type': 'epg_data_created',
'source_id': instance.id,
'source_name': instance.name,
'epg_data_id': epg_data.id
})
else:
logger.debug(f"EPGData already exists for dummy EPG source: {instance.name} (ID: {instance.id})")
@receiver(post_save, sender=EPGSource)
def create_or_update_refresh_task(sender, instance, **kwargs):
"""
Create or update a Celery Beat periodic task when an EPGSource is created/updated.
Skip creating tasks for dummy EPG sources as they don't need refreshing.
"""
# Skip task creation for dummy EPGs
if instance.source_type == 'dummy':
# If there's an existing task, disable it
if instance.refresh_task:
instance.refresh_task.enabled = False
instance.refresh_task.save(update_fields=['enabled'])
return
task_name = f"epg_source-refresh-{instance.id}"
interval, _ = IntervalSchedule.objects.get_or_create(
every=int(instance.refresh_interval),
@ -93,7 +26,7 @@ def create_or_update_refresh_task(sender, instance, **kwargs):
"interval": interval,
"task": "apps.epg.tasks.refresh_epg_data",
"kwargs": json.dumps({"source_id": instance.id}),
"enabled": instance.refresh_interval != 0 and instance.is_active,
"enabled": instance.refresh_interval != 0,
})
update_fields = []
@ -103,11 +36,8 @@ def create_or_update_refresh_task(sender, instance, **kwargs):
if task.interval != interval:
task.interval = interval
update_fields.append("interval")
# Check both refresh_interval and is_active to determine if task should be enabled
should_be_enabled = instance.refresh_interval != 0 and instance.is_active
if task.enabled != should_be_enabled:
task.enabled = should_be_enabled
update_fields.append("enabled")
if update_fields:
@ -115,82 +45,12 @@ def create_or_update_refresh_task(sender, instance, **kwargs):
if instance.refresh_task != task:
instance.refresh_task = task
instance.save(update_fields=["refresh_task"]) # Fixed field name
@receiver(post_delete, sender=EPGSource)
def delete_refresh_task(sender, instance, **kwargs):
"""
Delete the associated Celery Beat periodic task when an EPGSource is deleted.
"""
try:
# First try the foreign key relationship to find the task ID
task = None
if instance.refresh_task:
logger.info(f"Found task via foreign key: {instance.refresh_task.id} for EPGSource {instance.id}")
task = instance.refresh_task
# Store task ID before deletion if we need to bypass the helper function
if task:
delete_epg_refresh_task_by_id(instance.id)
else:
# Otherwise use the helper function
delete_epg_refresh_task_by_id(instance.id)
except Exception as e:
logger.error(f"Error in delete_refresh_task signal handler: {str(e)}", exc_info=True)
@receiver(pre_save, sender=EPGSource)
def update_status_on_active_change(sender, instance, **kwargs):
"""
When an EPGSource's is_active field changes, update the status accordingly.
For dummy EPGs, always ensure status is idle and no status message.
"""
# Dummy EPGs should always be idle with no status message
if instance.source_type == 'dummy':
instance.status = EPGSource.STATUS_IDLE
instance.last_message = None
return
if instance.pk: # Only for existing records, not new ones
try:
# Get the current record from the database
old_instance = EPGSource.objects.get(pk=instance.pk)
# If is_active changed, update the status
if old_instance.is_active != instance.is_active:
if instance.is_active:
# When activating, set status to idle
instance.status = 'idle'
else:
# When deactivating, set status to disabled
instance.status = 'disabled'
except EPGSource.DoesNotExist:
# New record, will use default status
pass
@receiver(post_delete, sender=EPGSource)
def delete_cached_files(sender, instance, **kwargs):
"""
Delete cached files associated with an EPGSource when it's deleted.
Only deletes files that aren't in protected directories.
"""
# Check and delete the main file path if not protected
if instance.file_path and os.path.exists(instance.file_path):
if is_protected_path(instance.file_path):
logger.info(f"Skipping deletion of protected file: {instance.file_path}")
else:
try:
os.remove(instance.file_path)
logger.info(f"Deleted cached file: {instance.file_path}")
except OSError as e:
logger.error(f"Error deleting cached file {instance.file_path}: {e}")
# Check and delete the extracted file path if it exists, is different from main path, and not protected
if instance.extracted_file_path and os.path.exists(instance.extracted_file_path) and instance.extracted_file_path != instance.file_path:
if is_protected_path(instance.extracted_file_path):
logger.info(f"Skipping deletion of protected extracted file: {instance.extracted_file_path}")
else:
try:
os.remove(instance.extracted_file_path)
logger.info(f"Deleted extracted file: {instance.extracted_file_path}")
except OSError as e:
logger.error(f"Error deleting extracted file {instance.extracted_file_path}: {e}")

File diff suppressed because it is too large

View file

@ -1,7 +1,7 @@
from rest_framework import viewsets, status
from rest_framework.response import Response
from rest_framework.views import APIView
from apps.accounts.permissions import Authenticated, permission_classes_by_action
from rest_framework.permissions import IsAuthenticated
from django.http import JsonResponse, HttpResponseForbidden, HttpResponse
import logging
from drf_yasg.utils import swagger_auto_schema
@ -17,30 +17,22 @@ from django.views import View
from django.utils.decorators import method_decorator
from django.contrib.auth.decorators import login_required
from django.views.decorators.csrf import csrf_exempt
from apps.m3u.models import M3UAccountProfile
# Configure logger
logger = logging.getLogger(__name__)
@login_required
def hdhr_dashboard_view(request):
"""Render the HDHR management page."""
hdhr_devices = HDHRDevice.objects.all()
return render(request, "hdhr/hdhr.html", {"hdhr_devices": hdhr_devices})
# 🔹 1) HDHomeRun Device API
class HDHRDeviceViewSet(viewsets.ModelViewSet):
"""Handles CRUD operations for HDHomeRun devices"""
queryset = HDHRDevice.objects.all()
serializer_class = HDHRDeviceSerializer
def get_permissions(self):
try:
return [perm() for perm in permission_classes_by_action[self.action]]
except KeyError:
return [Authenticated()]
# 🔹 2) Discover API
@ -49,33 +41,53 @@ class DiscoverAPIView(APIView):
@swagger_auto_schema(
operation_description="Retrieve HDHomeRun device discovery information",
responses={200: openapi.Response("HDHR Discovery JSON")},
responses={200: openapi.Response("HDHR Discovery JSON")}
)
def get(self, request, profile=None):
uri_parts = ["hdhr"]
if profile is not None:
uri_parts.append(profile)
base_url = request.build_absolute_uri(f'/{"/".join(uri_parts)}/').rstrip("/")
device = HDHRDevice.objects.first()
# Calculate tuner count using centralized function
from apps.m3u.utils import calculate_tuner_count
tuner_count = calculate_tuner_count(minimum=1, unlimited_default=10)
# Create a unique DeviceID for the HDHomeRun device based on profile ID or a default value
device_ID = "12345678" # Default DeviceID
friendly_name = "Dispatcharr HDHomeRun"
if profile is not None:
device_ID = f"dispatcharr-hdhr-{profile}"
friendly_name = f"Dispatcharr HDHomeRun - {profile}"
if not device:
data = {
"FriendlyName": friendly_name,
"FriendlyName": "Dispatcharr HDHomeRun",
"ModelNumber": "HDTC-2US",
"FirmwareName": "hdhomerun3_atsc",
"FirmwareVersion": "20200101",
"DeviceID": device_ID,
"DeviceID": "12345678",
"DeviceAuth": "test_auth_token",
"BaseURL": base_url,
"LineupURL": f"{base_url}/lineup.json",
@ -102,38 +114,28 @@ class LineupAPIView(APIView):
@swagger_auto_schema(
operation_description="Retrieve the available channel lineup",
responses={200: openapi.Response("Channel Lineup JSON")},
responses={200: openapi.Response("Channel Lineup JSON")}
)
def get(self, request, profile=None):
if profile is not None:
channel_profile = ChannelProfile.objects.get(name=profile)
channels = Channel.objects.filter(
channelprofilemembership__channel_profile=channel_profile,
channelprofilemembership__enabled=True,
).order_by("channel_number")
else:
channels = Channel.objects.all().order_by("channel_number")
lineup = []
for ch in channels:
# Format channel number as integer if it has no decimal component
if ch.channel_number is not None:
if ch.channel_number == int(ch.channel_number):
formatted_channel_number = str(int(ch.channel_number))
else:
formatted_channel_number = str(ch.channel_number)
else:
formatted_channel_number = ""
lineup.append(
{
"GuideNumber": formatted_channel_number,
"GuideName": ch.name,
"URL": request.build_absolute_uri(f"/proxy/ts/stream/{ch.uuid}"),
"Guide_ID": formatted_channel_number,
"Station": formatted_channel_number,
}
)
return JsonResponse(lineup, safe=False)
@ -143,14 +145,14 @@ class LineupStatusAPIView(APIView):
@swagger_auto_schema(
operation_description="Retrieve the HDHomeRun lineup status",
responses={200: openapi.Response("Lineup Status JSON")},
responses={200: openapi.Response("Lineup Status JSON")}
)
def get(self, request, profile=None):
data = {
"ScanInProgress": 0,
"ScanPossible": 0,
"Source": "Cable",
"SourceList": ["Cable"],
"SourceList": ["Cable"]
}
return JsonResponse(data)
@ -161,10 +163,10 @@ class HDHRDeviceXMLAPIView(APIView):
@swagger_auto_schema(
operation_description="Retrieve the HDHomeRun device XML configuration",
responses={200: openapi.Response("HDHR Device XML")},
responses={200: openapi.Response("HDHR Device XML")}
)
def get(self, request):
base_url = request.build_absolute_uri("/hdhr/").rstrip("/")
xml_response = f"""<?xml version="1.0" encoding="utf-8"?>
<root>
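
The lineup loop above formats whole channel numbers without a trailing .0, since HDHomeRun clients generally treat GuideNumber as a string and may not match "2.0" to channel 2. The same rule in isolation:

def format_guide_number(channel_number) -> str:
    """Format 2.0 as '2' but keep 2.5 as '2.5', as LineupAPIView does."""
    if channel_number is None:
        return ""
    if channel_number == int(channel_number):
        return str(int(channel_number))
    return str(channel_number)

assert format_guide_number(2.0) == "2"
assert format_guide_number(2.5) == "2.5"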

View file

@ -2,7 +2,6 @@ import os
import socket
import threading
import time
import gevent # Add this import
from django.conf import settings
# SSDP Multicast Address and Port
@ -60,7 +59,7 @@ def ssdp_broadcaster(host_ip):
sock.setsockopt(socket.IPPROTO_IP, socket.IP_MULTICAST_TTL, 2)
while True:
sock.sendto(notify.encode("utf-8"), (SSDP_MULTICAST, SSDP_PORT))
gevent.sleep(30) # Replace time.sleep with gevent.sleep
def start_ssdp():
host_ip = get_host_ip()
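
Swapping time.sleep for gevent.sleep matters because the broadcaster runs inside a gevent worker: time.sleep blocks the whole hub for 30 seconds, while gevent.sleep yields so other greenlets keep serving. Reduced to its core (constants per the standard SSDP spec, matching the module above):

import socket
import gevent

SSDP_MULTICAST = "239.255.255.250"  # standard SSDP multicast group
SSDP_PORT = 1900                    # standard SSDP port

def ssdp_broadcast_loop(notify: bytes) -> None:
    sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM, socket.IPPROTO_UDP)
    sock.setsockopt(socket.IPPROTO_IP, socket.IP_MULTICAST_TTL, 2)
    while True:
        sock.sendto(notify, (SSDP_MULTICAST, SSDP_PORT))
        gevent.sleep(30)  # cooperative sleep: other greenlets run meanwhile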

View file

@ -1,7 +1,7 @@
from rest_framework import viewsets, status
from rest_framework.response import Response
from rest_framework.views import APIView
from apps.accounts.permissions import Authenticated, permission_classes_by_action
from django.http import JsonResponse, HttpResponseForbidden, HttpResponse
from drf_yasg.utils import swagger_auto_schema
from drf_yasg import openapi
@ -16,26 +16,18 @@ from django.utils.decorators import method_decorator
from django.contrib.auth.decorators import login_required
from django.views.decorators.csrf import csrf_exempt
@login_required
def hdhr_dashboard_view(request):
"""Render the HDHR management page."""
hdhr_devices = HDHRDevice.objects.all()
return render(request, "hdhr/hdhr.html", {"hdhr_devices": hdhr_devices})
# 🔹 1) HDHomeRun Device API
class HDHRDeviceViewSet(viewsets.ModelViewSet):
"""Handles CRUD operations for HDHomeRun devices"""
queryset = HDHRDevice.objects.all()
serializer_class = HDHRDeviceSerializer
def get_permissions(self):
try:
return [perm() for perm in permission_classes_by_action[self.action]]
except KeyError:
return [Authenticated()]
# 🔹 2) Discover API
@ -44,10 +36,10 @@ class DiscoverAPIView(APIView):
@swagger_auto_schema(
operation_description="Retrieve HDHomeRun device discovery information",
responses={200: openapi.Response("HDHR Discovery JSON")},
responses={200: openapi.Response("HDHR Discovery JSON")}
)
def get(self, request):
base_url = request.build_absolute_uri("/hdhr/").rstrip("/")
device = HDHRDevice.objects.first()
if not device:
@ -83,15 +75,15 @@ class LineupAPIView(APIView):
@swagger_auto_schema(
operation_description="Retrieve the available channel lineup",
responses={200: openapi.Response("Channel Lineup JSON")},
responses={200: openapi.Response("Channel Lineup JSON")}
)
def get(self, request):
channels = Channel.objects.all().order_by("channel_number")
lineup = [
{
"GuideNumber": str(ch.channel_number),
"GuideName": ch.name,
"URL": request.build_absolute_uri(f"/proxy/ts/stream/{ch.uuid}"),
"URL": request.build_absolute_uri(f"/proxy/ts/stream/{ch.uuid}")
}
for ch in channels
]
@ -104,14 +96,14 @@ class LineupStatusAPIView(APIView):
@swagger_auto_schema(
operation_description="Retrieve the HDHomeRun lineup status",
responses={200: openapi.Response("Lineup Status JSON")},
responses={200: openapi.Response("Lineup Status JSON")}
)
def get(self, request):
data = {
"ScanInProgress": 0,
"ScanPossible": 0,
"Source": "Cable",
"SourceList": ["Cable"],
"SourceList": ["Cable"]
}
return JsonResponse(data)
@ -122,10 +114,10 @@ class HDHRDeviceXMLAPIView(APIView):
@swagger_auto_schema(
operation_description="Retrieve the HDHomeRun device XML configuration",
responses={200: openapi.Response("HDHR Device XML")},
responses={200: openapi.Response("HDHR Device XML")}
)
def get(self, request):
base_url = request.build_absolute_uri("/hdhr/").rstrip("/")
xml_response = f"""<?xml version="1.0" encoding="utf-8"?>
<root>

View file

@ -1,8 +1,6 @@
from django.contrib import admin
from django.utils.html import format_html
from .models import M3UAccount, M3UFilter, ServerGroup, UserAgent, M3UAccountProfile
import json
class M3UFilterInline(admin.TabularInline):
model = M3UFilter
@ -10,181 +8,50 @@ class M3UFilterInline(admin.TabularInline):
verbose_name = "M3U Filter"
verbose_name_plural = "M3U Filters"
@admin.register(M3UAccount)
class M3UAccountAdmin(admin.ModelAdmin):
list_display = (
"name",
"server_url",
"server_group",
"max_streams",
"priority",
"is_active",
"user_agent_display",
"uploaded_file_link",
"created_at",
"updated_at",
)
list_filter = ("is_active", "server_group")
search_fields = ("name", "server_url", "server_group__name")
inlines = [M3UFilterInline]
actions = ["activate_accounts", "deactivate_accounts"]
# Handle both ForeignKey and ManyToManyField cases for UserAgent
def user_agent_display(self, obj):
if hasattr(obj, "user_agent"): # ForeignKey case
return obj.user_agent.user_agent if obj.user_agent else "None"
elif hasattr(obj, "user_agents"): # ManyToManyField case
return ", ".join([ua.user_agent for ua in obj.user_agents.all()]) or "None"
return "None"
user_agent_display.short_description = "User Agent(s)"
def vod_enabled_display(self, obj):
"""Display whether VOD is enabled for this account"""
if obj.custom_properties:
custom_props = obj.custom_properties or {}
return "Yes" if custom_props.get('enable_vod', False) else "No"
return "No"
vod_enabled_display.short_description = "VOD Enabled"
vod_enabled_display.boolean = True
def uploaded_file_link(self, obj):
if obj.uploaded_file:
return format_html(
"<a href='{}' target='_blank'>Download M3U</a>", obj.uploaded_file.url
)
return format_html("<a href='{}' target='_blank'>Download M3U</a>", obj.uploaded_file.url)
return "No file uploaded"
uploaded_file_link.short_description = "Uploaded File"
@admin.action(description="Activate selected accounts")
def activate_accounts(self, request, queryset):
queryset.update(is_active=True)
@admin.action(description="Deactivate selected accounts")
def deactivate_accounts(self, request, queryset):
queryset.update(is_active=False)
# Add ManyToManyField for Django Admin (if applicable)
if hasattr(M3UAccount, "user_agents"):
filter_horizontal = ("user_agents",) # Only for ManyToManyField
@admin.register(M3UFilter)
class M3UFilterAdmin(admin.ModelAdmin):
list_display = ("m3u_account", "filter_type", "regex_pattern", "exclude")
list_filter = ("filter_type", "exclude")
search_fields = ("regex_pattern",)
ordering = ("m3u_account",)
@admin.register(ServerGroup)
class ServerGroupAdmin(admin.ModelAdmin):
list_display = ("name",)
search_fields = ("name",)
@admin.register(M3UAccountProfile)
class M3UAccountProfileAdmin(admin.ModelAdmin):
list_display = (
"name",
"m3u_account",
"is_default",
"is_active",
"max_streams",
"current_viewers",
"account_status_display",
"account_expiration_display",
"last_refresh_display",
)
list_filter = ("is_active", "is_default", "m3u_account__account_type")
search_fields = ("name", "m3u_account__name")
readonly_fields = ("account_info_display",)
def account_status_display(self, obj):
"""Display account status from custom properties"""
status = obj.get_account_status()
if status:
# Create colored status display
color_map = {
'Active': 'green',
'Expired': 'red',
'Disabled': 'red',
'Banned': 'red',
}
color = color_map.get(status, 'black')
return format_html(
'<span style="color: {};">{}</span>',
color,
status
)
return "Unknown"
account_status_display.short_description = "Account Status"
def account_expiration_display(self, obj):
"""Display account expiration from custom properties"""
expiration = obj.get_account_expiration()
if expiration:
from datetime import datetime
if expiration < datetime.now():
return format_html(
'<span style="color: red;">{}</span>',
expiration.strftime('%Y-%m-%d %H:%M')
)
else:
return format_html(
'<span style="color: green;">{}</span>',
expiration.strftime('%Y-%m-%d %H:%M')
)
return "Unknown"
account_expiration_display.short_description = "Expires"
def last_refresh_display(self, obj):
"""Display last refresh time from custom properties"""
last_refresh = obj.get_last_refresh()
if last_refresh:
return last_refresh.strftime('%Y-%m-%d %H:%M:%S')
return "Never"
last_refresh_display.short_description = "Last Refresh"
def account_info_display(self, obj):
"""Display formatted account information from custom properties"""
if not obj.custom_properties:
return "No account information available"
html_parts = []
# User Info
user_info = obj.custom_properties.get('user_info', {})
if user_info:
html_parts.append("<h3>User Information:</h3>")
html_parts.append("<ul>")
for key, value in user_info.items():
if key == 'exp_date' and value:
try:
from datetime import datetime
exp_date = datetime.fromtimestamp(float(value))
value = exp_date.strftime('%Y-%m-%d %H:%M:%S')
except (ValueError, TypeError):
pass
html_parts.append(f"<li><strong>{key}:</strong> {value}</li>")
html_parts.append("</ul>")
# Server Info
server_info = obj.custom_properties.get('server_info', {})
if server_info:
html_parts.append("<h3>Server Information:</h3>")
html_parts.append("<ul>")
for key, value in server_info.items():
html_parts.append(f"<li><strong>{key}:</strong> {value}</li>")
html_parts.append("</ul>")
# Last Refresh
last_refresh = obj.custom_properties.get('last_refresh')
if last_refresh:
html_parts.append(f"<p><strong>Last Refresh:</strong> {last_refresh}</p>")
return format_html(''.join(html_parts)) if html_parts else "No account information available"
account_info_display.short_description = "Account Information"
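
Xtream-style panels report exp_date as a Unix timestamp (often as a string); account_info_display() converts it before rendering. The conversion on its own:

from datetime import datetime

def format_exp_date(value):
    """Render an Xtream-style exp_date (Unix timestamp) as a readable string."""
    try:
        return datetime.fromtimestamp(float(value)).strftime("%Y-%m-%d %H:%M:%S")
    except (ValueError, TypeError):
        return value  # leave unparseable values as-is, matching the admin

print(format_exp_date("1767225600"))  # 2026-01-01 00:00:00 UTC (local time varies)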

View file

@ -1,44 +1,18 @@
from django.urls import path, include
from rest_framework.routers import DefaultRouter
from .api_views import (
M3UAccountViewSet,
M3UFilterViewSet,
ServerGroupViewSet,
RefreshM3UAPIView,
RefreshSingleM3UAPIView,
RefreshAccountInfoAPIView,
UserAgentViewSet,
M3UAccountProfileViewSet,
)
app_name = "m3u"
router = DefaultRouter()
router.register(r"accounts", M3UAccountViewSet, basename="m3u-account")
router.register(
r"accounts\/(?P<account_id>\d+)\/profiles",
M3UAccountProfileViewSet,
basename="m3u-account-profiles",
)
router.register(
r"accounts\/(?P<account_id>\d+)\/filters",
M3UFilterViewSet,
basename="m3u-filters",
)
router.register(r"server-groups", ServerGroupViewSet, basename="server-group")
urlpatterns = [
path("refresh/", RefreshM3UAPIView.as_view(), name="m3u_refresh"),
path(
"refresh/<int:account_id>/",
RefreshSingleM3UAPIView.as_view(),
name="m3u_refresh_single",
),
path(
"refresh-account-info/<int:profile_id>/",
RefreshAccountInfoAPIView.as_view(),
name="m3u_refresh_account_info",
),
]
urlpatterns += router.urls
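
With the nested registrations above, profiles and filters become per-account sub-resources. Assuming the app is mounted under /api/m3u/ (the prefix comes from the project-level urls.py, which is not shown here), the resulting routes look like:

# Illustrative route table (mount prefix assumed, see note above):
#   /api/m3u/accounts/                  -> M3UAccountViewSet
#   /api/m3u/accounts/3/profiles/       -> M3UAccountProfileViewSet
#   /api/m3u/accounts/3/filters/        -> M3UFilterViewSet
#   /api/m3u/refresh/3/                 -> RefreshSingleM3UAPIView
#   /api/m3u/refresh-account-info/7/    -> RefreshAccountInfoAPIView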

View file

@ -1,11 +1,7 @@
from rest_framework import viewsets, status
from rest_framework.response import Response
from rest_framework.views import APIView
from apps.accounts.permissions import (
Authenticated,
permission_classes_by_action,
permission_classes_by_method,
)
from drf_yasg.utils import swagger_auto_schema
from drf_yasg import openapi
from django.shortcuts import get_object_or_404
@ -14,15 +10,13 @@ from django.core.cache import cache
import os
from rest_framework.decorators import action
from django.conf import settings
from .tasks import refresh_m3u_groups
import json
# Import all models, including UserAgent.
from .models import M3UAccount, M3UFilter, ServerGroup, M3UAccountProfile
from core.models import UserAgent
from apps.channels.models import ChannelGroupM3UAccount
from core.serializers import UserAgentSerializer
from apps.vod.models import M3UVODCategoryRelation
# Import all serializers, including the UserAgentSerializer.
from .serializers import (
M3UAccountSerializer,
M3UFilterSerializer,
@ -30,455 +24,130 @@ from .serializers import (
M3UAccountProfileSerializer,
)
from .tasks import refresh_single_m3u_account, refresh_m3u_accounts, refresh_account_info
from django.core.files.storage import default_storage
from django.core.files.base import ContentFile
class M3UAccountViewSet(viewsets.ModelViewSet):
"""Handles CRUD operations for M3U accounts"""
queryset = M3UAccount.objects.prefetch_related("channel_group")
serializer_class = M3UAccountSerializer
def get_permissions(self):
try:
return [perm() for perm in permission_classes_by_action[self.action]]
except KeyError:
return [Authenticated()]
def create(self, request, *args, **kwargs):
# Handle file upload first, if any
file_path = None
if "file" in request.FILES:
file = request.FILES["file"]
file_name = file.name
file_path = os.path.join("/data/uploads/m3us", file_name)
os.makedirs(os.path.dirname(file_path), exist_ok=True)
with open(file_path, "wb+") as destination:
for chunk in file.chunks():
destination.write(chunk)
# Add file_path to the request data so it's available during creation
request.data._mutable = True # Allow modification of the request data
request.data["file_path"] = (
file_path # Include the file path if a file was uploaded
)
# Handle the user_agent field - convert "null" string to None
if "user_agent" in request.data and request.data["user_agent"] == "null":
request.data["user_agent"] = None
# Handle server_url appropriately
if "server_url" in request.data and not request.data["server_url"]:
request.data.pop("server_url")
request.data._mutable = False # Make the request data immutable again
# Now call super().create() to create the instance
response = super().create(request, *args, **kwargs)
account_type = response.data.get("account_type")
account_id = response.data.get("id")
# Notify frontend that a new playlist was created
from core.utils import send_websocket_update
send_websocket_update('updates', 'update', {
'type': 'playlist_created',
'playlist_id': account_id
})
if account_type == M3UAccount.Types.XC:
refresh_m3u_groups(account_id)
# Check if VOD is enabled
enable_vod = request.data.get("enable_vod", False)
if enable_vod:
from apps.vod.tasks import refresh_categories
refresh_categories(account_id)
# After the instance is created, return the response
return response
def update(self, request, *args, **kwargs):
instance = self.get_object()
old_vod_enabled = False
# Check current VOD setting
if instance.custom_properties:
custom_props = instance.custom_properties or {}
old_vod_enabled = custom_props.get("enable_vod", False)
# Handle file upload first, if any
file_path = None
if "file" in request.FILES:
file = request.FILES["file"]
file_name = file.name
file_path = os.path.join("/data/uploads/m3us", file_name)
os.makedirs(os.path.dirname(file_path), exist_ok=True)
with open(file_path, "wb+") as destination:
for chunk in file.chunks():
destination.write(chunk)
# Add file_path to the request data so it's available during creation
request.data._mutable = True # Allow modification of the request data
request.data["file_path"] = (
file_path # Include the file path if a file was uploaded
)
# Handle the user_agent field - convert "null" string to None
if "user_agent" in request.data and request.data["user_agent"] == "null":
request.data["user_agent"] = None
# Handle server_url appropriately
if "server_url" in request.data and not request.data["server_url"]:
request.data.pop("server_url")
request.data._mutable = False # Make the request data immutable again
if instance.file_path and os.path.exists(instance.file_path):
os.remove(instance.file_path)
# Now call super().update() to update the instance
response = super().update(request, *args, **kwargs)
# Check if VOD setting changed and trigger refresh if needed
new_vod_enabled = request.data.get("enable_vod", old_vod_enabled)
if (
instance.account_type == M3UAccount.Types.XC
and not old_vod_enabled
and new_vod_enabled
):
# Create Uncategorized categories immediately so they're available in the UI
from apps.vod.models import VODCategory, M3UVODCategoryRelation
# Create movie Uncategorized category
movie_category, _ = VODCategory.objects.get_or_create(
name="Uncategorized",
category_type="movie",
defaults={}
)
# Create series Uncategorized category
series_category, _ = VODCategory.objects.get_or_create(
name="Uncategorized",
category_type="series",
defaults={}
)
# Create relations for both categories (disabled by default until first refresh)
account_custom_props = instance.custom_properties or {}
auto_enable_new = account_custom_props.get("auto_enable_new_groups_vod", True)
M3UVODCategoryRelation.objects.get_or_create(
category=movie_category,
m3u_account=instance,
defaults={
'enabled': auto_enable_new,
'custom_properties': {}
}
)
M3UVODCategoryRelation.objects.get_or_create(
category=series_category,
m3u_account=instance,
defaults={
'enabled': auto_enable_new,
'custom_properties': {}
}
)
# Trigger full VOD refresh
from apps.vod.tasks import refresh_vod_content
refresh_vod_content.delay(instance.id)
# After the instance is updated, return the response
return response
def partial_update(self, request, *args, **kwargs):
"""Handle partial updates with special logic for is_active field"""
instance = self.get_object()
# Check if we're toggling is_active
if (
"is_active" in request.data
and instance.is_active != request.data["is_active"]
):
# Set appropriate status based on new is_active value
if request.data["is_active"]:
request.data["status"] = M3UAccount.Status.IDLE
else:
request.data["status"] = M3UAccount.Status.DISABLED
# Continue with regular partial update
return super().partial_update(request, *args, **kwargs)
@action(detail=True, methods=["post"], url_path="refresh-vod")
def refresh_vod(self, request, pk=None):
"""Trigger VOD content refresh for XtreamCodes accounts"""
account = self.get_object()
if account.account_type != M3UAccount.Types.XC:
return Response(
{"error": "VOD refresh is only available for XtreamCodes accounts"},
status=status.HTTP_400_BAD_REQUEST,
)
# Check if VOD is enabled
vod_enabled = False
if account.custom_properties:
custom_props = account.custom_properties or {}
vod_enabled = custom_props.get("enable_vod", False)
if not vod_enabled:
return Response(
{"error": "VOD is not enabled for this account"},
status=status.HTTP_400_BAD_REQUEST,
)
try:
from apps.vod.tasks import refresh_vod_content
refresh_vod_content.delay(account.id)
return Response(
{"message": f"VOD refresh initiated for account {account.name}"},
status=status.HTTP_202_ACCEPTED,
)
except Exception as e:
return Response(
{"error": f"Failed to initiate VOD refresh: {str(e)}"},
status=status.HTTP_500_INTERNAL_SERVER_ERROR,
)
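A hedged usage sketch for this action; DRF's router derives the URL from url_path="refresh-vod" under the account detail route, but the host, port, account ID, and token below are hypothetical:
import requests

resp = requests.post(
    "http://localhost:9191/api/m3u/accounts/1/refresh-vod/",  # hypothetical host/port/ID
    headers={"Authorization": "Bearer <token>"},
)
# Expect 202 on success, 400 if the account is not XtreamCodes or VOD is disabled
print(resp.status_code, resp.json())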
@action(detail=True, methods=["patch"], url_path="group-settings")
def update_group_settings(self, request, pk=None):
"""Update auto channel sync settings for M3U account groups"""
account = self.get_object()
group_settings = request.data.get("group_settings", [])
category_settings = request.data.get("category_settings", [])
try:
for setting in group_settings:
group_id = setting.get("channel_group")
enabled = setting.get("enabled", True)
auto_sync = setting.get("auto_channel_sync", False)
sync_start = setting.get("auto_sync_channel_start")
custom_properties = setting.get("custom_properties", {})
if group_id:
ChannelGroupM3UAccount.objects.update_or_create(
channel_group_id=group_id,
m3u_account=account,
defaults={
"enabled": enabled,
"auto_channel_sync": auto_sync,
"auto_sync_channel_start": sync_start,
"custom_properties": custom_properties,
},
)
for setting in category_settings:
category_id = setting.get("id")
enabled = setting.get("enabled", True)
custom_properties = setting.get("custom_properties", {})
if category_id:
M3UVODCategoryRelation.objects.update_or_create(
category_id=category_id,
m3u_account=account,
defaults={
"enabled": enabled,
"custom_properties": custom_properties,
},
)
return Response({"message": "Group settings updated successfully"})
except Exception as e:
return Response(
{"error": f"Failed to update group settings: {str(e)}"},
status=status.HTTP_400_BAD_REQUEST,
)
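For reference, a sketch of the request body this PATCH handler expects; the field names come straight from the handler above, while the IDs and values are invented for the example:
payload = {
    "group_settings": [
        {
            "channel_group": 12,  # ChannelGroup primary key (hypothetical)
            "enabled": True,
            "auto_channel_sync": False,
            "auto_sync_channel_start": 100,
            "custom_properties": {},
        }
    ],
    "category_settings": [
        {"id": 3, "enabled": True, "custom_properties": {}},  # VOD category relation
    ],
}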
class M3UFilterViewSet(viewsets.ModelViewSet):
"""Handles CRUD operations for M3U filters"""
queryset = M3UFilter.objects.all()
serializer_class = M3UFilterSerializer
def get_permissions(self):
try:
return [perm() for perm in permission_classes_by_action[self.action]]
except KeyError:
return [Authenticated()]
def get_queryset(self):
m3u_account_id = self.kwargs["account_id"]
return M3UFilter.objects.filter(m3u_account_id=m3u_account_id)
def perform_create(self, serializer):
# Get the account ID from the URL
account_id = self.kwargs["account_id"]
# # Get the M3UAccount instance for the account_id
# m3u_account = M3UAccount.objects.get(id=account_id)
# Save the 'm3u_account' in the serializer context
serializer.context["m3u_account"] = account_id
# Perform the actual save
serializer.save(m3u_account_id=account_id)
permission_classes = [IsAuthenticated]
class ServerGroupViewSet(viewsets.ModelViewSet):
"""Handles CRUD operations for Server Groups"""
queryset = ServerGroup.objects.all()
serializer_class = ServerGroupSerializer
def get_permissions(self):
try:
return [perm() for perm in permission_classes_by_action[self.action]]
except KeyError:
return [Authenticated()]
permission_classes = [IsAuthenticated]
class RefreshM3UAPIView(APIView):
"""Triggers refresh for all active M3U accounts"""
def get_permissions(self):
try:
return [
perm() for perm in permission_classes_by_method[self.request.method]
]
except KeyError:
return [Authenticated()]
@swagger_auto_schema(
operation_description="Triggers a refresh of all active M3U accounts",
responses={202: "M3U refresh initiated"},
)
def post(self, request, format=None):
refresh_m3u_accounts.delay()
return Response(
{"success": True, "message": "M3U refresh initiated."},
status=status.HTTP_202_ACCEPTED,
)
class RefreshSingleM3UAPIView(APIView):
"""Triggers refresh for a single M3U account"""
def get_permissions(self):
try:
return [
perm() for perm in permission_classes_by_method[self.request.method]
]
except KeyError:
return [Authenticated()]
@swagger_auto_schema(
operation_description="Triggers a refresh of a single M3U account",
responses={202: "M3U account refresh initiated"},
)
def post(self, request, account_id, format=None):
refresh_single_m3u_account.delay(account_id)
return Response(
{
"success": True,
"message": f"M3U account {account_id} refresh initiated.",
},
status=status.HTTP_202_ACCEPTED,
)
class RefreshAccountInfoAPIView(APIView):
"""Triggers account info refresh for a single M3U account"""
def get_permissions(self):
try:
return [
perm() for perm in permission_classes_by_method[self.request.method]
]
except KeyError:
return [Authenticated()]
@swagger_auto_schema(
operation_description="Triggers a refresh of account information for a specific M3U profile",
responses={202: "Account info refresh initiated", 400: "Profile not found or not XtreamCodes"},
)
def post(self, request, profile_id, format=None):
try:
from .models import M3UAccountProfile
profile = M3UAccountProfile.objects.get(id=profile_id)
account = profile.m3u_account
if account.account_type != M3UAccount.Types.XC:
return Response(
{
"success": False,
"error": "Account info refresh is only available for XtreamCodes accounts",
},
status=status.HTTP_400_BAD_REQUEST,
)
refresh_account_info.delay(profile_id)
return Response(
{
"success": True,
"message": f"Account info refresh initiated for profile {profile.name}.",
},
status=status.HTTP_202_ACCEPTED,
)
except M3UAccountProfile.DoesNotExist:
return Response(
{
"success": False,
"error": "Profile not found",
},
status=status.HTTP_404_NOT_FOUND,
)
class UserAgentViewSet(viewsets.ModelViewSet):
"""Handles CRUD operations for User Agents"""
queryset = UserAgent.objects.all()
serializer_class = UserAgentSerializer
def get_permissions(self):
try:
return [perm() for perm in permission_classes_by_action[self.action]]
except KeyError:
return [Authenticated()]
permission_classes = [IsAuthenticated]
class M3UAccountProfileViewSet(viewsets.ModelViewSet):
queryset = M3UAccountProfile.objects.all()
serializer_class = M3UAccountProfileSerializer
def get_permissions(self):
try:
return [perm() for perm in permission_classes_by_action[self.action]]
except KeyError:
return [Authenticated()]
permission_classes = [IsAuthenticated]
def get_queryset(self):
m3u_account_id = self.kwargs["account_id"]
return M3UAccountProfile.objects.filter(m3u_account_id=m3u_account_id)
def perform_create(self, serializer):
# Get the account ID from the URL
account_id = self.kwargs["account_id"]
# Get the M3UAccount instance for the account_id
m3u_account = M3UAccount.objects.get(id=account_id)
# Save the 'm3u_account' in the serializer context
serializer.context["m3u_account"] = m3u_account
# Perform the actual save
serializer.save(m3u_account_id=m3u_account.id)

View file

@@ -4,13 +4,6 @@ from .models import M3UAccount, M3UFilter
import re
class M3UAccountForm(forms.ModelForm):
enable_vod = forms.BooleanField(
required=False,
initial=False,
label="Enable VOD Content",
help_text="Parse and import VOD (movies/series) content for XtreamCodes accounts"
)
class Meta:
model = M3UAccount
fields = [
@@ -20,34 +13,8 @@ class M3UAccountForm(forms.ModelForm):
'server_group',
'max_streams',
'is_active',
'enable_vod',
]
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
# Set initial value for enable_vod from custom_properties
if self.instance and self.instance.custom_properties:
custom_props = self.instance.custom_properties or {}
self.fields['enable_vod'].initial = custom_props.get('enable_vod', False)
def save(self, commit=True):
instance = super().save(commit=False)
# Handle enable_vod field
enable_vod = self.cleaned_data.get('enable_vod', False)
# Parse existing custom_properties
custom_props = instance.custom_properties or {}
# Update VOD preference
custom_props['enable_vod'] = enable_vod
instance.custom_properties = custom_props
if commit:
instance.save()
return instance
def clean_uploaded_file(self):
uploaded_file = self.cleaned_data.get('uploaded_file')
if uploaded_file:

View file

@@ -3,7 +3,6 @@
from django.db import migrations
from core.models import CoreSettings
def create_custom_account(apps, schema_editor):
default_user_agent_id = CoreSettings.get_default_user_agent_id()
@@ -19,7 +18,7 @@ def create_custom_account(apps, schema_editor):
M3UAccountProfile = apps.get_model("m3u", "M3UAccountProfile")
M3UAccountProfile.objects.create(
m3u_account=m3u_account,
name=f"{m3u_account.name} Default",
max_streams=m3u_account.max_streams,
is_default=True,
is_active=True,
@@ -27,12 +26,10 @@ def create_custom_account(apps, schema_editor):
replace_pattern="$1",
)
class Migration(migrations.Migration):
dependencies = [
("m3u", "0002_m3uaccount_locked"),
("core", "0004_preload_core_settings"),
]
operations = [

View file

@@ -7,29 +7,24 @@ from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("django_celery_beat", "0019_alter_periodictasks_options"),
("m3u", "0004_m3uaccount_stream_profile"),
]
operations = [
migrations.AddField(
model_name="m3uaccount",
name="custom_properties",
field=models.TextField(blank=True, null=True),
),
migrations.AddField(
model_name="m3uaccount",
name="refresh_interval",
field=models.IntegerField(default=24),
),
migrations.AddField(
model_name="m3uaccount",
name="refresh_task",
field=models.ForeignKey(
blank=True,
null=True,
on_delete=django.db.models.deletion.SET_NULL,
to="django_celery_beat.periodictask",
),
),
]

View file

@@ -1,18 +0,0 @@
# Generated by Django 5.1.6
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('m3u', '0007_remove_m3uaccount_uploaded_file_m3uaccount_file_path'),
]
operations = [
migrations.AddField(
model_name='m3uaccount',
name='stale_stream_days',
field=models.PositiveIntegerField(default=7, help_text='Number of days after which a stream will be removed if not seen in the M3U source.'),
),
]

View file

@@ -1,28 +0,0 @@
# Generated by Django 5.1.6 on 2025-04-27 12:56
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('m3u', '0008_m3uaccount_stale_stream_days'),
]
operations = [
migrations.AddField(
model_name='m3uaccount',
name='account_type',
field=models.CharField(choices=[('STD', 'Standard'), ('XC', 'Xtream Codes')], default='STD'),
),
migrations.AddField(
model_name='m3uaccount',
name='password',
field=models.CharField(blank=True, max_length=255, null=True),
),
migrations.AddField(
model_name='m3uaccount',
name='username',
field=models.CharField(blank=True, max_length=255, null=True),
),
]

View file

@@ -1,28 +0,0 @@
# Generated by Django 5.1.6 on 2025-05-04 21:43
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('m3u', '0009_m3uaccount_account_type_m3uaccount_password_and_more'),
]
operations = [
migrations.AddField(
model_name='m3uaccount',
name='last_message',
field=models.TextField(blank=True, null=True, help_text="Last status message, including success results or error information"),
),
migrations.AddField(
model_name='m3uaccount',
name='status',
field=models.CharField(choices=[('idle', 'Idle'), ('fetching', 'Fetching'), ('parsing', 'Parsing'), ('error', 'Error'), ('success', 'Success')], default='idle', max_length=20),
),
migrations.AlterField(
model_name='m3uaccount',
name='updated_at',
field=models.DateTimeField(blank=True, help_text='Time when this account was last successfully refreshed', null=True),
),
]

View file

@@ -1,18 +0,0 @@
# Generated by Django 5.1.6 on 2025-05-15 01:05
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('m3u', '0010_add_status_fields_and_remove_auto_now'),
]
operations = [
migrations.AlterField(
model_name='m3uaccount',
name='status',
field=models.CharField(choices=[('idle', 'Idle'), ('fetching', 'Fetching'), ('parsing', 'Parsing'), ('error', 'Error'), ('success', 'Success'), ('pending_setup', 'Pending Setup'), ('disabled', 'Disabled')], default='idle', max_length=20),
),
]

View file

@@ -1,18 +0,0 @@
# Generated by Django 5.1.6 on 2025-05-21 19:58
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('m3u', '0011_alter_m3uaccount_status'),
]
operations = [
migrations.AlterField(
model_name='m3uaccount',
name='refresh_interval',
field=models.IntegerField(default=0),
),
]

View file

@@ -1,18 +0,0 @@
# Generated by Django 5.1.6 on 2025-07-22 21:16
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('m3u', '0012_alter_m3uaccount_refresh_interval'),
]
operations = [
migrations.AlterField(
model_name='m3ufilter',
name='filter_type',
field=models.CharField(choices=[('group', 'Group'), ('name', 'Stream Name'), ('url', 'Stream URL')], default='group', help_text='Filter based on either group title or stream name.', max_length=50),
),
]

View file

@@ -1,22 +0,0 @@
# Generated by Django 5.1.6 on 2025-07-31 17:14
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('m3u', '0013_alter_m3ufilter_filter_type'),
]
operations = [
migrations.AlterModelOptions(
name='m3ufilter',
options={'ordering': ['order']},
),
migrations.AddField(
model_name='m3ufilter',
name='order',
field=models.PositiveIntegerField(default=0),
),
]

View file

@@ -1,22 +0,0 @@
# Generated by Django 5.2.4 on 2025-08-02 16:06
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('m3u', '0014_alter_m3ufilter_options_m3ufilter_order'),
]
operations = [
migrations.AlterModelOptions(
name='m3ufilter',
options={},
),
migrations.AddField(
model_name='m3ufilter',
name='custom_properties',
field=models.TextField(blank=True, null=True),
),
]

View file

@@ -1,18 +0,0 @@
# Generated by Django 5.2.4 on 2025-08-20 22:35
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('m3u', '0015_alter_m3ufilter_options_m3ufilter_custom_properties'),
]
operations = [
migrations.AddField(
model_name='m3uaccount',
name='priority',
field=models.PositiveIntegerField(default=0, help_text='Priority for VOD provider selection (higher numbers = higher priority). Used when multiple providers offer the same content.'),
),
]

View file

@@ -1,28 +0,0 @@
# Generated by Django 5.2.4 on 2025-09-02 15:19
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('m3u', '0016_m3uaccount_priority'),
]
operations = [
migrations.AlterField(
model_name='m3uaccount',
name='custom_properties',
field=models.JSONField(blank=True, default=dict, null=True),
),
migrations.AlterField(
model_name='m3uaccount',
name='server_url',
field=models.URLField(blank=True, help_text='The base URL of the M3U server (optional if a file is uploaded)', max_length=1000, null=True),
),
migrations.AlterField(
model_name='m3ufilter',
name='custom_properties',
field=models.JSONField(blank=True, default=dict, null=True),
),
]

View file

@@ -1,18 +0,0 @@
# Generated by Django 5.2.4 on 2025-09-09 20:57
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('m3u', '0017_alter_m3uaccount_custom_properties_and_more'),
]
operations = [
migrations.AddField(
model_name='m3uaccountprofile',
name='custom_properties',
field=models.JSONField(blank=True, default=dict, help_text='Custom properties for storing account information from provider (e.g., XC account details, expiration dates)', null=True),
),
]

View file

@@ -7,98 +7,73 @@ from apps.channels.models import StreamProfile
from django_celery_beat.models import PeriodicTask
from core.models import CoreSettings, UserAgent
CUSTOM_M3U_ACCOUNT_NAME = "custom"
class M3UAccount(models.Model):
class Types(models.TextChoices):
STADNARD = "STD", "Standard"
XC = "XC", "Xtream Codes"
class Status(models.TextChoices):
IDLE = "idle", "Idle"
FETCHING = "fetching", "Fetching"
PARSING = "parsing", "Parsing"
ERROR = "error", "Error"
SUCCESS = "success", "Success"
PENDING_SETUP = "pending_setup", "Pending Setup"
DISABLED = "disabled", "Disabled"
"""Represents an M3U Account for IPTV streams."""
name = models.CharField(
max_length=255, unique=True, help_text="Unique name for this M3U account"
)
server_url = models.URLField(
max_length=1000,
blank=True,
null=True,
help_text="The base URL of the M3U server (optional if a file is uploaded)",
)
file_path = models.CharField(max_length=255, blank=True, null=True)
server_group = models.ForeignKey(
"ServerGroup",
on_delete=models.SET_NULL,
null=True,
blank=True,
related_name="m3u_accounts",
help_text="The server group this M3U account belongs to",
)
max_streams = models.PositiveIntegerField(
default=0, help_text="Maximum number of concurrent streams (0 for unlimited)"
)
is_active = models.BooleanField(
default=True, help_text="Set to false to deactivate this M3U account"
)
created_at = models.DateTimeField(
auto_now_add=True, help_text="Time when this account was created"
)
updated_at = models.DateTimeField(
null=True,
blank=True,
help_text="Time when this account was last successfully refreshed",
)
status = models.CharField(
max_length=20, choices=Status.choices, default=Status.IDLE
)
last_message = models.TextField(
null=True,
blank=True,
help_text="Last status message, including success results or error information",
)
user_agent = models.ForeignKey(
"core.UserAgent",
on_delete=models.SET_NULL,
null=True,
blank=True,
related_name="m3u_accounts",
help_text="The User-Agent associated with this M3U account.",
)
locked = models.BooleanField(
default=False, help_text="Protected - can't be deleted or modified"
)
stream_profile = models.ForeignKey(
StreamProfile,
on_delete=models.SET_NULL,
null=True,
blank=True,
related_name="m3u_accounts",
)
account_type = models.CharField(choices=Types.choices, default=Types.STADNARD)
username = models.CharField(max_length=255, null=True, blank=True)
password = models.CharField(max_length=255, null=True, blank=True)
custom_properties = models.JSONField(default=dict, blank=True, null=True)
refresh_interval = models.IntegerField(default=0)
refresh_task = models.ForeignKey(
PeriodicTask, on_delete=models.SET_NULL, null=True, blank=True
)
stale_stream_days = models.PositiveIntegerField(
default=7,
help_text="Number of days after which a stream will be removed if not seen in the M3U source.",
)
priority = models.PositiveIntegerField(
default=0,
help_text="Priority for VOD provider selection (higher numbers = higher priority). Used when multiple providers offer the same content.",
)
def __str__(self):
return self.name
@@ -129,21 +104,10 @@ class M3UAccount(models.Model):
def get_user_agent(self):
user_agent = self.user_agent
if not user_agent:
user_agent = UserAgent.objects.get(
id=CoreSettings.get_default_user_agent_id()
)
return user_agent
def save(self, *args, **kwargs):
# Prevent auto_now behavior by handling updated_at manually
if "update_fields" in kwargs and "updated_at" not in kwargs["update_fields"]:
# Don't modify updated_at for regular updates
kwargs.setdefault("update_fields", [])
if "updated_at" in kwargs["update_fields"]:
kwargs["update_fields"].remove("updated_at")
super().save(*args, **kwargs)
# def get_channel_groups(self):
# return ChannelGroup.objects.filter(m3u_account__m3u_account=self)
@@ -155,40 +119,35 @@ class M3UAccount(models.Model):
# """Return all streams linked to this account with enabled ChannelGroups."""
# return self.streams.filter(channel_group__in=ChannelGroup.objects.filter(m3u_account__enabled=True))
class M3UFilter(models.Model):
"""Defines filters for M3U accounts based on stream name or group title."""
FILTER_TYPE_CHOICES = (
("group", "Group"),
("name", "Stream Name"),
("url", "Stream URL"),
)
m3u_account = models.ForeignKey(
M3UAccount,
on_delete=models.CASCADE,
related_name="filters",
help_text="The M3U account this filter is applied to.",
)
filter_type = models.CharField(
max_length=50,
choices=FILTER_TYPE_CHOICES,
default="group",
help_text="Filter based on either group title or stream name.",
)
regex_pattern = models.CharField(
max_length=200, help_text="A regex pattern to match streams or groups."
)
exclude = models.BooleanField(
default=True,
help_text="If True, matching items are excluded; if False, only matches are included.",
)
order = models.PositiveIntegerField(default=0)
custom_properties = models.JSONField(default=dict, blank=True, null=True)
def applies_to(self, stream_name, group_name):
target = group_name if self.filter_type == "group" else stream_name
return bool(re.search(self.regex_pattern, target, re.IGNORECASE))
def clean(self):
@@ -198,9 +157,7 @@ class M3UFilter(models.Model):
raise ValidationError(f"Invalid regex pattern: {self.regex_pattern}")
def __str__(self):
filter_type_display = dict(self.FILTER_TYPE_CHOICES).get(
self.filter_type, "Unknown"
)
exclude_status = "Exclude" if self.exclude else "Include"
return f"[{self.m3u_account.name}] {filter_type_display}: {self.regex_pattern} ({exclude_status})"
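To make the filter semantics concrete, a short sketch using applies_to as defined above; the pattern and the stream/group names are invented for the example:
f = M3UFilter(filter_type="group", regex_pattern=r"^Sports", exclude=True)
f.applies_to("ESPN HD", "Sports: US")  # True: group matches, so the stream is excluded
f.applies_to("CNN", "News")            # False: no match, the stream is kept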
@@ -226,35 +183,40 @@ class M3UFilter(models.Model):
class ServerGroup(models.Model):
"""Represents a logical grouping of servers or channels."""
name = models.CharField(
max_length=100, unique=True, help_text="Unique name for this server group."
)
def __str__(self):
return self.name
from django.db import models
class M3UAccountProfile(models.Model):
"""Represents a profile associated with an M3U Account."""
m3u_account = models.ForeignKey(
"M3UAccount",
on_delete=models.CASCADE,
related_name="profiles",
help_text="The M3U account this profile belongs to.",
)
name = models.CharField(
max_length=255, help_text="Name for the M3U account profile"
)
is_default = models.BooleanField(
default=False, help_text="Set to false to deactivate this profile"
)
max_streams = models.PositiveIntegerField(
default=0, help_text="Maximum number of concurrent streams (0 for unlimited)"
)
is_active = models.BooleanField(
default=True, help_text="Set to false to deactivate this profile"
)
search_pattern = models.CharField(
max_length=255,
@@ -263,95 +225,22 @@ class M3UAccountProfile(models.Model):
max_length=255,
)
current_viewers = models.PositiveIntegerField(default=0)
custom_properties = models.JSONField(
default=dict,
blank=True,
null=True,
help_text="Custom properties for storing account information from provider (e.g., XC account details, expiration dates)"
)
class Meta:
constraints = [
models.UniqueConstraint(
fields=["m3u_account", "name"], name="unique_account_name"
)
]
def __str__(self):
return f"{self.name} ({self.m3u_account.name})"
def get_account_expiration(self):
"""Get account expiration date from custom properties if available"""
if not self.custom_properties:
return None
user_info = self.custom_properties.get('user_info', {})
exp_date = user_info.get('exp_date')
if exp_date:
try:
from datetime import datetime
# XC exp_date is typically a Unix timestamp
if isinstance(exp_date, (int, float)):
return datetime.fromtimestamp(exp_date)
elif isinstance(exp_date, str):
# Try to parse as timestamp first, then as ISO date
try:
return datetime.fromtimestamp(float(exp_date))
except ValueError:
return datetime.fromisoformat(exp_date)
except (ValueError, TypeError):
pass
return None
def get_account_status(self):
"""Get account status from custom properties if available"""
if not self.custom_properties:
return None
user_info = self.custom_properties.get('user_info', {})
return user_info.get('status')
def get_max_connections(self):
"""Get maximum connections from custom properties if available"""
if not self.custom_properties:
return None
user_info = self.custom_properties.get('user_info', {})
return user_info.get('max_connections')
def get_active_connections(self):
"""Get active connections from custom properties if available"""
if not self.custom_properties:
return None
user_info = self.custom_properties.get('user_info', {})
return user_info.get('active_cons')
def get_last_refresh(self):
"""Get last refresh timestamp from custom properties if available"""
if not self.custom_properties:
return None
last_refresh = self.custom_properties.get('last_refresh')
if last_refresh:
try:
from datetime import datetime
return datetime.fromisoformat(last_refresh)
except (ValueError, TypeError):
pass
return None
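For clarity, a sketch of the custom_properties shape these helpers read; the keys mirror an XtreamCodes player_api response, and every value below is illustrative only:
profile.custom_properties = {
    "user_info": {
        "exp_date": 1767225600,  # Unix timestamp, read by get_account_expiration()
        "status": "Active",      # read by get_account_status()
        "max_connections": "2",  # read by get_max_connections()
        "active_cons": "0",      # read by get_active_connections()
    },
    "last_refresh": "2025-01-01T00:00:00",  # ISO string, read by get_last_refresh()
}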
@receiver(models.signals.post_save, sender=M3UAccount)
def create_profile_for_m3u_account(sender, instance, created, **kwargs):
"""Automatically create an M3UAccountProfile when M3UAccount is created."""
if created:
M3UAccountProfile.objects.create(
m3u_account=instance,
name=f"{instance.name} Default",
max_streams=instance.max_streams,
is_default=True,
is_active=True,
@@ -364,5 +253,6 @@ def create_profile_for_m3u_account(sender, instance, created, **kwargs):
is_default=True,
)
profile.max_streams = instance.max_streams
profile.save()

View file

@@ -1,106 +1,41 @@
from core.utils import validate_flexible_url
from rest_framework import serializers, status
from rest_framework.response import Response
from .models import M3UAccount, M3UFilter, ServerGroup, M3UAccountProfile
from core.models import UserAgent
from apps.channels.models import ChannelGroup, ChannelGroupM3UAccount
from apps.channels.serializers import (
ChannelGroupM3UAccountSerializer,
)
import logging
import json
logger = logging.getLogger(__name__)
class M3UFilterSerializer(serializers.ModelSerializer):
"""Serializer for M3U Filters"""
channel_groups = ChannelGroupM3UAccountSerializer(source='m3u_account', many=True)
class Meta:
model = M3UFilter
fields = [
"id",
"filter_type",
"regex_pattern",
"exclude",
"order",
"custom_properties",
]
from rest_framework import serializers
from .models import M3UAccountProfile
class M3UAccountProfileSerializer(serializers.ModelSerializer):
account = serializers.SerializerMethodField()
def get_account(self, obj):
"""Include basic account information for frontend use"""
return {
'id': obj.m3u_account.id,
'name': obj.m3u_account.name,
'account_type': obj.m3u_account.account_type,
'is_xtream_codes': obj.m3u_account.account_type == 'XC'
}
class Meta:
model = M3UAccountProfile
fields = [
"id",
"name",
"max_streams",
"is_active",
"is_default",
"current_viewers",
"search_pattern",
"replace_pattern",
"custom_properties",
"account",
]
read_only_fields = ["id", "account"]
extra_kwargs = {
'search_pattern': {'required': False, 'allow_blank': True},
'replace_pattern': {'required': False, 'allow_blank': True},
}
def create(self, validated_data):
m3u_account = self.context.get("m3u_account")
# Use the m3u_account when creating the profile
validated_data["m3u_account_id"] = m3u_account.id
return super().create(validated_data)
def validate(self, data):
"""Custom validation to handle default profiles"""
# For updates to existing instances
if self.instance and self.instance.is_default:
# For default profiles, search_pattern and replace_pattern are not required
# and we don't want to validate them since they shouldn't be changed
return data
# For non-default profiles or new profiles, ensure required fields are present
if not data.get('search_pattern'):
raise serializers.ValidationError({
'search_pattern': ['This field is required for non-default profiles.']
})
if not data.get('replace_pattern'):
raise serializers.ValidationError({
'replace_pattern': ['This field is required for non-default profiles.']
})
return data
def update(self, instance, validated_data):
if instance.is_default:
# For default profiles, only allow updating name and custom_properties (for notes)
allowed_fields = {'name', 'custom_properties'}
# Remove any fields that aren't allowed for default profiles
disallowed_fields = set(validated_data.keys()) - allowed_fields
if disallowed_fields:
raise serializers.ValidationError(
f"Default profiles can only modify name and notes. "
f"Cannot modify: {', '.join(disallowed_fields)}"
)
return super().update(instance, validated_data)
def destroy(self, request, *args, **kwargs):
@@ -108,15 +43,13 @@ class M3UAccountProfileSerializer(serializers.ModelSerializer):
if instance.is_default:
return Response(
{"error": "Default profiles cannot be deleted."},
status=status.HTTP_400_BAD_REQUEST,
)
return super().destroy(request, *args, **kwargs)
class M3UAccountSerializer(serializers.ModelSerializer):
"""Serializer for M3U Account"""
filters = serializers.SerializerMethodField()
# Include user_agent as a mandatory field using its primary key.
user_agent = serializers.PrimaryKeyRelatedField(
queryset=UserAgent.objects.all(),
@@ -124,96 +57,21 @@ class M3UAccountSerializer(serializers.ModelSerializer):
allow_null=True,
)
profiles = M3UAccountProfileSerializer(many=True, read_only=True)
read_only_fields = ["locked", "created_at", "updated_at"]
# channel_groups = serializers.SerializerMethodField()
channel_groups = ChannelGroupM3UAccountSerializer(
source="channel_group", many=True, required=False
)
server_url = serializers.CharField(
required=False,
allow_blank=True,
allow_null=True,
validators=[validate_flexible_url],
)
enable_vod = serializers.BooleanField(required=False, write_only=True)
auto_enable_new_groups_live = serializers.BooleanField(required=False, write_only=True)
auto_enable_new_groups_vod = serializers.BooleanField(required=False, write_only=True)
auto_enable_new_groups_series = serializers.BooleanField(required=False, write_only=True)
class Meta:
model = M3UAccount
fields = [
"id",
"name",
"server_url",
"file_path",
"server_group",
"max_streams",
"is_active",
"created_at",
"updated_at",
"filters",
"user_agent",
"profiles",
"locked",
"channel_groups",
"refresh_interval",
"custom_properties",
"account_type",
"username",
"password",
"stale_stream_days",
"priority",
"status",
"last_message",
"enable_vod",
"auto_enable_new_groups_live",
"auto_enable_new_groups_vod",
"auto_enable_new_groups_series",
]
extra_kwargs = {
"password": {
"required": False,
"allow_blank": True,
},
}
def to_representation(self, instance):
data = super().to_representation(instance)
# Parse custom_properties to get VOD preference and auto_enable_new_groups settings
custom_props = instance.custom_properties or {}
data["enable_vod"] = custom_props.get("enable_vod", False)
data["auto_enable_new_groups_live"] = custom_props.get("auto_enable_new_groups_live", True)
data["auto_enable_new_groups_vod"] = custom_props.get("auto_enable_new_groups_vod", True)
data["auto_enable_new_groups_series"] = custom_props.get("auto_enable_new_groups_series", True)
return data
def update(self, instance, validated_data):
# Handle enable_vod preference and auto_enable_new_groups settings
enable_vod = validated_data.pop("enable_vod", None)
auto_enable_new_groups_live = validated_data.pop("auto_enable_new_groups_live", None)
auto_enable_new_groups_vod = validated_data.pop("auto_enable_new_groups_vod", None)
auto_enable_new_groups_series = validated_data.pop("auto_enable_new_groups_series", None)
# Get existing custom_properties
custom_props = instance.custom_properties or {}
# Update preferences
if enable_vod is not None:
custom_props["enable_vod"] = enable_vod
if auto_enable_new_groups_live is not None:
custom_props["auto_enable_new_groups_live"] = auto_enable_new_groups_live
if auto_enable_new_groups_vod is not None:
custom_props["auto_enable_new_groups_vod"] = auto_enable_new_groups_vod
if auto_enable_new_groups_series is not None:
custom_props["auto_enable_new_groups_series"] = auto_enable_new_groups_series
validated_data["custom_properties"] = custom_props
# Pop out channel group memberships so we can handle them manually
channel_group_data = validated_data.pop("channel_group", [])
# First, update the M3UAccount itself
for attr, value in validated_data.items():
@@ -223,12 +81,13 @@ class M3UAccountSerializer(serializers.ModelSerializer):
# Prepare a list of memberships to update
memberships_to_update = []
for group_data in channel_group_data:
group = group_data.get("channel_group")
enabled = group_data.get("enabled")
try:
membership = ChannelGroupM3UAccount.objects.get(
m3u_account=instance, channel_group=group
)
membership.enabled = enabled
memberships_to_update.append(membership)
@@ -237,39 +96,13 @@ class M3UAccountSerializer(serializers.ModelSerializer):
# Perform the bulk update
if memberships_to_update:
ChannelGroupM3UAccount.objects.bulk_update(
memberships_to_update, ["enabled"]
)
return instance
def create(self, validated_data):
# Handle enable_vod preference and auto_enable_new_groups settings during creation
enable_vod = validated_data.pop("enable_vod", False)
auto_enable_new_groups_live = validated_data.pop("auto_enable_new_groups_live", True)
auto_enable_new_groups_vod = validated_data.pop("auto_enable_new_groups_vod", True)
auto_enable_new_groups_series = validated_data.pop("auto_enable_new_groups_series", True)
# Parse existing custom_properties or create new
custom_props = validated_data.get("custom_properties", {})
# Set preferences (default to True for auto_enable_new_groups)
custom_props["enable_vod"] = enable_vod
custom_props["auto_enable_new_groups_live"] = auto_enable_new_groups_live
custom_props["auto_enable_new_groups_vod"] = auto_enable_new_groups_vod
custom_props["auto_enable_new_groups_series"] = auto_enable_new_groups_series
validated_data["custom_properties"] = custom_props
return super().create(validated_data)
def get_filters(self, obj):
filters = obj.filters.order_by("order")
return M3UFilterSerializer(filters, many=True).data
class ServerGroupSerializer(serializers.ModelSerializer):
"""Serializer for Server Group"""
class Meta:
model = ServerGroup
fields = ["id", "name"]

View file

@@ -1,13 +1,10 @@
# apps/m3u/signals.py
from django.db.models.signals import post_save, post_delete, pre_save
from django.dispatch import receiver
from .models import M3UAccount
from .tasks import refresh_single_m3u_account, refresh_m3u_groups, delete_m3u_refresh_task_by_id
from django_celery_beat.models import PeriodicTask, IntervalSchedule
import json
import logging
logger = logging.getLogger(__name__)
@receiver(post_save, sender=M3UAccount)
def refresh_account_on_save(sender, instance, created, **kwargs):
@@ -16,7 +13,7 @@ def refresh_account_on_save(sender, instance, created, **kwargs):
call a Celery task that fetches & parses that single account
if it is active or newly created.
"""
if created and instance.account_type != M3UAccount.Types.XC:
refresh_m3u_groups.delay(instance.id)
@receiver(post_save, sender=M3UAccount)
@@ -31,17 +28,21 @@ def create_or_update_refresh_task(sender, instance, **kwargs):
period=IntervalSchedule.HOURS
)
# Task should be enabled only if refresh_interval != 0 AND account is active
should_be_enabled = (instance.refresh_interval != 0) and instance.is_active
# First check if the task already exists to avoid validation errors
try:
task = PeriodicTask.objects.get(name=task_name)
# Task exists, just update it
updated_fields = []
if task.enabled != should_be_enabled:
task.enabled = should_be_enabled
updated_fields.append("enabled")
if task.interval != interval:
@@ -51,60 +52,11 @@ def create_or_update_refresh_task(sender, instance, **kwargs):
if updated_fields:
task.save(update_fields=updated_fields)
# Ensure instance has the task
if instance.refresh_task_id != task.id:
M3UAccount.objects.filter(id=instance.id).update(refresh_task=task)
except PeriodicTask.DoesNotExist:
# Create new task if it doesn't exist
refresh_task = PeriodicTask.objects.create(
name=task_name,
interval=interval,
task="apps.m3u.tasks.refresh_single_m3u_account",
kwargs=json.dumps({"account_id": instance.id}),
enabled=should_be_enabled,
)
M3UAccount.objects.filter(id=instance.id).update(refresh_task=refresh_task)
@receiver(post_delete, sender=M3UAccount)
def delete_refresh_task(sender, instance, **kwargs):
"""
Delete the associated Celery Beat periodic task when a Channel is deleted.
"""
try:
# First try the foreign key relationship to find the task ID
task = None
if instance.refresh_task:
logger.info(f"Found task via foreign key: {instance.refresh_task.id} for M3UAccount {instance.id}")
task = instance.refresh_task
# Use the helper function to delete the task
if task:
delete_m3u_refresh_task_by_id(instance.id)
else:
# Otherwise use the helper function
delete_m3u_refresh_task_by_id(instance.id)
except Exception as e:
logger.error(f"Error in delete_refresh_task signal handler: {str(e)}", exc_info=True)
@receiver(pre_save, sender=M3UAccount)
def update_status_on_active_change(sender, instance, **kwargs):
"""
When an M3UAccount's is_active field changes, update the status accordingly.
"""
if instance.pk: # Only for existing records, not new ones
try:
# Get the current record from the database
old_instance = M3UAccount.objects.get(pk=instance.pk)
# If is_active changed, update the status
if old_instance.is_active != instance.is_active:
if instance.is_active:
# When activating, set status to idle
instance.status = M3UAccount.Status.IDLE
else:
# When deactivating, set status to disabled
instance.status = M3UAccount.Status.DISABLED
except M3UAccount.DoesNotExist:
# New record, will use default status
pass

File diff suppressed because it is too large

Some files were not shown because too many files have changed in this diff