mirror of
https://github.com/Dispatcharr/Dispatcharr.git
synced 2026-01-23 02:35:14 +00:00
Merge branch 'dev' into smoke-and-mirrors
This commit is contained in:
commit
9761ebe59d
59 changed files with 9179 additions and 2156 deletions
135
.github/workflows/base-image.yml
vendored
135
.github/workflows/base-image.yml
vendored
|
|
@ -2,42 +2,37 @@ name: Base Image Build
|
|||
|
||||
on:
|
||||
push:
|
||||
branches: [ main, dev ]
|
||||
branches: [main, dev]
|
||||
paths:
|
||||
- 'docker/DispatcharrBase'
|
||||
- '.github/workflows/base-image.yml'
|
||||
- 'requirements.txt'
|
||||
pull_request:
|
||||
branches: [ main, dev ]
|
||||
branches: [main, dev]
|
||||
paths:
|
||||
- 'docker/DispatcharrBase'
|
||||
- '.github/workflows/base-image.yml'
|
||||
- 'requirements.txt'
|
||||
workflow_dispatch: # Allow manual triggering
|
||||
workflow_dispatch: # Allow manual triggering
|
||||
|
||||
permissions:
|
||||
contents: write # For managing releases and pushing tags
|
||||
packages: write # For publishing to GitHub Container Registry
|
||||
contents: write # For managing releases and pushing tags
|
||||
packages: write # For publishing to GitHub Container Registry
|
||||
|
||||
jobs:
|
||||
build-base-image:
|
||||
runs-on: ubuntu-latest
|
||||
prepare:
|
||||
runs-on: ubuntu-24.04
|
||||
outputs:
|
||||
repo_owner: ${{ steps.meta.outputs.repo_owner }}
|
||||
repo_name: ${{ steps.meta.outputs.repo_name }}
|
||||
branch_tag: ${{ steps.meta.outputs.branch_tag }}
|
||||
timestamp: ${{ steps.timestamp.outputs.timestamp }}
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
with:
|
||||
fetch-depth: 0
|
||||
token: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@v2
|
||||
|
||||
- name: Login to GitHub Container Registry
|
||||
uses: docker/login-action@v2
|
||||
with:
|
||||
registry: ghcr.io
|
||||
username: ${{ github.actor }}
|
||||
password: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
- name: Generate timestamp for build
|
||||
id: timestamp
|
||||
run: |
|
||||
|
|
@ -66,13 +61,111 @@ jobs:
|
|||
echo "branch_tag=base-${BRANCH}" >> $GITHUB_OUTPUT
|
||||
fi
|
||||
|
||||
docker:
|
||||
needs: [prepare]
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
platform: [amd64, arm64]
|
||||
include:
|
||||
- platform: amd64
|
||||
runner: ubuntu-24.04
|
||||
- platform: arm64
|
||||
runner: ubuntu-24.04-arm
|
||||
runs-on: ${{ matrix.runner }}
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
with:
|
||||
fetch-depth: 0
|
||||
token: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
- name: Configure Git
|
||||
run: |
|
||||
git config user.name "GitHub Actions"
|
||||
git config user.email "actions@github.com"
|
||||
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@v2
|
||||
|
||||
- name: Login to GitHub Container Registry
|
||||
uses: docker/login-action@v2
|
||||
with:
|
||||
registry: ghcr.io
|
||||
username: ${{ github.actor }}
|
||||
password: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
- name: Login to Docker Hub
|
||||
uses: docker/login-action@v2
|
||||
with:
|
||||
registry: docker.io
|
||||
username: ${{ secrets.DOCKERHUB_USERNAME }}
|
||||
password: ${{ secrets.DOCKERHUB_TOKEN }}
|
||||
|
||||
- name: Build and push Docker base image
|
||||
uses: docker/build-push-action@v4
|
||||
with:
|
||||
context: .
|
||||
file: ./docker/DispatcharrBase
|
||||
push: true
|
||||
platforms: linux/amd64,linux/arm64
|
||||
push: ${{ github.event_name != 'pull_request' }}
|
||||
platforms: linux/${{ matrix.platform }}
|
||||
tags: |
|
||||
ghcr.io/${{ steps.meta.outputs.repo_owner }}/${{ steps.meta.outputs.repo_name }}:base
|
||||
ghcr.io/${{ steps.meta.outputs.repo_owner }}/${{ steps.meta.outputs.repo_name }}:base-${{ steps.timestamp.outputs.timestamp }}
|
||||
ghcr.io/${{ needs.prepare.outputs.repo_owner }}/${{ needs.prepare.outputs.repo_name }}:${{ needs.prepare.outputs.branch_tag }}-${{ matrix.platform }}
|
||||
ghcr.io/${{ needs.prepare.outputs.repo_owner }}/${{ needs.prepare.outputs.repo_name }}:${{ needs.prepare.outputs.branch_tag }}-${{ needs.prepare.outputs.timestamp }}-${{ matrix.platform }}
|
||||
docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${{ needs.prepare.outputs.repo_name }}:${{ needs.prepare.outputs.branch_tag }}-${{ matrix.platform }}
|
||||
docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${{ needs.prepare.outputs.repo_name }}:${{ needs.prepare.outputs.branch_tag }}-${{ needs.prepare.outputs.timestamp }}-${{ matrix.platform }}
|
||||
build-args: |
|
||||
REPO_OWNER=${{ needs.prepare.outputs.repo_owner }}
|
||||
REPO_NAME=${{ needs.prepare.outputs.repo_name }}
|
||||
BRANCH=${{ github.ref_name }}
|
||||
REPO_URL=https://github.com/${{ github.repository }}
|
||||
TIMESTAMP=${{ needs.prepare.outputs.timestamp }}
|
||||
|
||||
create-manifest:
|
||||
needs: [prepare, docker]
|
||||
runs-on: ubuntu-24.04
|
||||
if: ${{ github.event_name != 'pull_request' }}
|
||||
steps:
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@v2
|
||||
|
||||
- name: Login to GitHub Container Registry
|
||||
uses: docker/login-action@v2
|
||||
with:
|
||||
registry: ghcr.io
|
||||
username: ${{ github.actor }}
|
||||
password: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
- name: Login to Docker Hub
|
||||
uses: docker/login-action@v2
|
||||
with:
|
||||
registry: docker.io
|
||||
username: ${{ secrets.DOCKERHUB_USERNAME }}
|
||||
password: ${{ secrets.DOCKERHUB_TOKEN }}
|
||||
|
||||
- name: Create multi-arch manifest tags
|
||||
run: |
|
||||
set -euo pipefail
|
||||
OWNER=${{ needs.prepare.outputs.repo_owner }}
|
||||
REPO=${{ needs.prepare.outputs.repo_name }}
|
||||
BRANCH_TAG=${{ needs.prepare.outputs.branch_tag }}
|
||||
TIMESTAMP=${{ needs.prepare.outputs.timestamp }}
|
||||
|
||||
echo "Creating multi-arch manifest for ${OWNER}/${REPO}"
|
||||
|
||||
# GitHub Container Registry manifests
|
||||
# branch tag (e.g. base or base-dev)
|
||||
docker buildx imagetools create --tag ghcr.io/${OWNER}/${REPO}:${BRANCH_TAG} \
|
||||
ghcr.io/${OWNER}/${REPO}:${BRANCH_TAG}-amd64 ghcr.io/${OWNER}/${REPO}:${BRANCH_TAG}-arm64
|
||||
|
||||
# branch + timestamp tag
|
||||
docker buildx imagetools create --tag ghcr.io/${OWNER}/${REPO}:${BRANCH_TAG}-${TIMESTAMP} \
|
||||
ghcr.io/${OWNER}/${REPO}:${BRANCH_TAG}-${TIMESTAMP}-amd64 ghcr.io/${OWNER}/${REPO}:${BRANCH_TAG}-${TIMESTAMP}-arm64
|
||||
|
||||
# Docker Hub manifests
|
||||
# branch tag (e.g. base or base-dev)
|
||||
docker buildx imagetools create --tag docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${REPO}:${BRANCH_TAG} \
|
||||
docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${REPO}:${BRANCH_TAG}-amd64 docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${REPO}:${BRANCH_TAG}-arm64
|
||||
|
||||
# branch + timestamp tag
|
||||
docker buildx imagetools create --tag docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${REPO}:${BRANCH_TAG}-${TIMESTAMP} \
|
||||
docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${REPO}:${BRANCH_TAG}-${TIMESTAMP}-amd64 docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${REPO}:${BRANCH_TAG}-${TIMESTAMP}-arm64
|
||||
|
|
|
|||
200
.github/workflows/ci.yml
vendored
200
.github/workflows/ci.yml
vendored
|
|
@ -2,19 +2,84 @@ name: CI Pipeline
|
|||
|
||||
on:
|
||||
push:
|
||||
branches: [ dev ]
|
||||
branches: [dev]
|
||||
pull_request:
|
||||
branches: [ dev ]
|
||||
workflow_dispatch: # Allow manual triggering
|
||||
branches: [dev]
|
||||
workflow_dispatch:
|
||||
|
||||
# Add explicit permissions for the workflow
|
||||
permissions:
|
||||
contents: write # For managing releases and pushing tags
|
||||
packages: write # For publishing to GitHub Container Registry
|
||||
contents: write
|
||||
packages: write
|
||||
|
||||
jobs:
|
||||
build:
|
||||
runs-on: ubuntu-latest
|
||||
prepare:
|
||||
runs-on: ubuntu-24.04
|
||||
# compute a single timestamp, version, and repo metadata for the entire workflow
|
||||
outputs:
|
||||
repo_owner: ${{ steps.meta.outputs.repo_owner }}
|
||||
repo_name: ${{ steps.meta.outputs.repo_name }}
|
||||
branch_tag: ${{ steps.meta.outputs.branch_tag }}
|
||||
version: ${{ steps.version.outputs.version }}
|
||||
timestamp: ${{ steps.timestamp.outputs.timestamp }}
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
with:
|
||||
fetch-depth: 0
|
||||
token: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
- name: Generate timestamp for build
|
||||
id: timestamp
|
||||
run: |
|
||||
TIMESTAMP=$(date -u +'%Y%m%d%H%M%S')
|
||||
echo "timestamp=${TIMESTAMP}" >> $GITHUB_OUTPUT
|
||||
|
||||
- name: Extract version info
|
||||
id: version
|
||||
run: |
|
||||
VERSION=$(python -c "import version; print(version.__version__)")
|
||||
echo "version=${VERSION}" >> $GITHUB_OUTPUT
|
||||
|
||||
- name: Set repository and image metadata
|
||||
id: meta
|
||||
run: |
|
||||
REPO_OWNER=$(echo "${{ github.repository_owner }}" | tr '[:upper:]' '[:lower:]')
|
||||
echo "repo_owner=${REPO_OWNER}" >> $GITHUB_OUTPUT
|
||||
|
||||
REPO_NAME=$(echo "${{ github.repository }}" | cut -d '/' -f 2 | tr '[:upper:]' '[:lower:]')
|
||||
echo "repo_name=${REPO_NAME}" >> $GITHUB_OUTPUT
|
||||
|
||||
if [[ "${{ github.ref }}" == "refs/heads/main" ]]; then
|
||||
echo "branch_tag=latest" >> $GITHUB_OUTPUT
|
||||
echo "is_main=true" >> $GITHUB_OUTPUT
|
||||
elif [[ "${{ github.ref }}" == "refs/heads/dev" ]]; then
|
||||
echo "branch_tag=dev" >> $GITHUB_OUTPUT
|
||||
echo "is_main=false" >> $GITHUB_OUTPUT
|
||||
else
|
||||
BRANCH=$(echo "${{ github.ref }}" | sed 's/refs\/heads\///' | sed 's/[^a-zA-Z0-9]/-/g')
|
||||
echo "branch_tag=${BRANCH}" >> $GITHUB_OUTPUT
|
||||
echo "is_main=false" >> $GITHUB_OUTPUT
|
||||
fi
|
||||
|
||||
if [[ "${{ github.event.pull_request.head.repo.fork }}" == "true" ]]; then
|
||||
echo "is_fork=true" >> $GITHUB_OUTPUT
|
||||
else
|
||||
echo "is_fork=false" >> $GITHUB_OUTPUT
|
||||
fi
|
||||
|
||||
docker:
|
||||
needs: [prepare]
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
platform: [amd64, arm64]
|
||||
include:
|
||||
- platform: amd64
|
||||
runner: ubuntu-24.04
|
||||
- platform: arm64
|
||||
runner: ubuntu-24.04-arm
|
||||
runs-on: ${{ matrix.runner }}
|
||||
# no per-job outputs here; shared metadata comes from the `prepare` job
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
with:
|
||||
|
|
@ -45,66 +110,85 @@ jobs:
|
|||
username: ${{ github.actor }}
|
||||
password: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
- name: Generate timestamp for build
|
||||
id: timestamp
|
||||
run: |
|
||||
TIMESTAMP=$(date -u +'%Y%m%d%H%M%S')
|
||||
echo "timestamp=${TIMESTAMP}" >> $GITHUB_OUTPUT
|
||||
- name: Login to Docker Hub
|
||||
uses: docker/login-action@v2
|
||||
with:
|
||||
registry: docker.io
|
||||
username: ${{ secrets.DOCKERHUB_USERNAME }}
|
||||
password: ${{ secrets.DOCKERHUB_TOKEN }}
|
||||
|
||||
- name: Extract version info
|
||||
id: version
|
||||
run: |
|
||||
VERSION=$(python -c "import version; print(version.__version__)")
|
||||
echo "version=${VERSION}" >> $GITHUB_OUTPUT
|
||||
echo "sha_short=${GITHUB_SHA::7}" >> $GITHUB_OUTPUT
|
||||
|
||||
- name: Set repository and image metadata
|
||||
id: meta
|
||||
run: |
|
||||
# Get lowercase repository owner
|
||||
REPO_OWNER=$(echo "${{ github.repository_owner }}" | tr '[:upper:]' '[:lower:]')
|
||||
echo "repo_owner=${REPO_OWNER}" >> $GITHUB_OUTPUT
|
||||
|
||||
# Get repository name
|
||||
REPO_NAME=$(echo "${{ github.repository }}" | cut -d '/' -f 2 | tr '[:upper:]' '[:lower:]')
|
||||
echo "repo_name=${REPO_NAME}" >> $GITHUB_OUTPUT
|
||||
|
||||
# Determine branch name
|
||||
if [[ "${{ github.ref }}" == "refs/heads/main" ]]; then
|
||||
echo "branch_tag=latest" >> $GITHUB_OUTPUT
|
||||
echo "is_main=true" >> $GITHUB_OUTPUT
|
||||
elif [[ "${{ github.ref }}" == "refs/heads/dev" ]]; then
|
||||
echo "branch_tag=dev" >> $GITHUB_OUTPUT
|
||||
echo "is_main=false" >> $GITHUB_OUTPUT
|
||||
else
|
||||
# For other branches, use the branch name
|
||||
BRANCH=$(echo "${{ github.ref }}" | sed 's/refs\/heads\///' | sed 's/[^a-zA-Z0-9]/-/g')
|
||||
echo "branch_tag=${BRANCH}" >> $GITHUB_OUTPUT
|
||||
echo "is_main=false" >> $GITHUB_OUTPUT
|
||||
fi
|
||||
|
||||
# Determine if this is from a fork
|
||||
if [[ "${{ github.event.pull_request.head.repo.fork }}" == "true" ]]; then
|
||||
echo "is_fork=true" >> $GITHUB_OUTPUT
|
||||
else
|
||||
echo "is_fork=false" >> $GITHUB_OUTPUT
|
||||
fi
|
||||
# use metadata from the prepare job
|
||||
|
||||
- name: Build and push Docker image
|
||||
uses: docker/build-push-action@v4
|
||||
with:
|
||||
context: .
|
||||
push: ${{ github.event_name != 'pull_request' }}
|
||||
platforms: linux/amd64,linux/arm64
|
||||
# Build only the platform for this matrix job to avoid running amd64
|
||||
# stages under qemu on an arm64 runner (and vice-versa). This makes
|
||||
# the matrix runner's platform the one built by buildx.
|
||||
platforms: linux/${{ matrix.platform }}
|
||||
# push arch-specific tags from each matrix job (they will be combined
|
||||
# into a multi-arch manifest in a follow-up job)
|
||||
tags: |
|
||||
ghcr.io/${{ steps.meta.outputs.repo_owner }}/${{ steps.meta.outputs.repo_name }}:${{ steps.meta.outputs.branch_tag }}
|
||||
ghcr.io/${{ steps.meta.outputs.repo_owner }}/${{ steps.meta.outputs.repo_name }}:${{ steps.version.outputs.version }}-${{ steps.timestamp.outputs.timestamp }}
|
||||
ghcr.io/${{ steps.meta.outputs.repo_owner }}/${{ steps.meta.outputs.repo_name }}:${{ steps.version.outputs.sha_short }}
|
||||
ghcr.io/${{ needs.prepare.outputs.repo_owner }}/${{ needs.prepare.outputs.repo_name }}:${{ needs.prepare.outputs.branch_tag }}-${{ matrix.platform }}
|
||||
ghcr.io/${{ needs.prepare.outputs.repo_owner }}/${{ needs.prepare.outputs.repo_name }}:${{ needs.prepare.outputs.version }}-${{ needs.prepare.outputs.timestamp }}-${{ matrix.platform }}
|
||||
docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${{ needs.prepare.outputs.repo_name }}:${{ needs.prepare.outputs.branch_tag }}-${{ matrix.platform }}
|
||||
docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${{ needs.prepare.outputs.repo_name }}:${{ needs.prepare.outputs.version }}-${{ needs.prepare.outputs.timestamp }}-${{ matrix.platform }}
|
||||
build-args: |
|
||||
REPO_OWNER=${{ steps.meta.outputs.repo_owner }}
|
||||
REPO_NAME=${{ steps.meta.outputs.repo_name }}
|
||||
REPO_OWNER=${{ needs.prepare.outputs.repo_owner }}
|
||||
REPO_NAME=${{ needs.prepare.outputs.repo_name }}
|
||||
BASE_TAG=base
|
||||
BRANCH=${{ github.ref_name }}
|
||||
REPO_URL=https://github.com/${{ github.repository }}
|
||||
TIMESTAMP=${{ steps.timestamp.outputs.timestamp }}
|
||||
TIMESTAMP=${{ needs.prepare.outputs.timestamp }}
|
||||
file: ./docker/Dockerfile
|
||||
|
||||
create-manifest:
|
||||
# wait for prepare and all matrix builds to finish
|
||||
needs: [prepare, docker]
|
||||
runs-on: ubuntu-24.04
|
||||
if: ${{ github.event_name != 'pull_request' }}
|
||||
steps:
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@v2
|
||||
|
||||
- name: Login to GitHub Container Registry
|
||||
uses: docker/login-action@v2
|
||||
with:
|
||||
registry: ghcr.io
|
||||
username: ${{ github.actor }}
|
||||
password: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
- name: Login to Docker Hub
|
||||
uses: docker/login-action@v2
|
||||
with:
|
||||
registry: docker.io
|
||||
username: ${{ secrets.DOCKERHUB_USERNAME }}
|
||||
password: ${{ secrets.DOCKERHUB_TOKEN }}
|
||||
|
||||
- name: Create multi-arch manifest tags
|
||||
run: |
|
||||
set -euo pipefail
|
||||
OWNER=${{ needs.prepare.outputs.repo_owner }}
|
||||
REPO=${{ needs.prepare.outputs.repo_name }}
|
||||
BRANCH_TAG=${{ needs.prepare.outputs.branch_tag }}
|
||||
VERSION=${{ needs.prepare.outputs.version }}
|
||||
TIMESTAMP=${{ needs.prepare.outputs.timestamp }}
|
||||
|
||||
echo "Creating multi-arch manifest for ${OWNER}/${REPO}"
|
||||
|
||||
# branch tag (e.g. latest or dev)
|
||||
docker buildx imagetools create --tag ghcr.io/${OWNER}/${REPO}:${BRANCH_TAG} \
|
||||
ghcr.io/${OWNER}/${REPO}:${BRANCH_TAG}-amd64 ghcr.io/${OWNER}/${REPO}:${BRANCH_TAG}-arm64
|
||||
|
||||
# version + timestamp tag
|
||||
docker buildx imagetools create --tag ghcr.io/${OWNER}/${REPO}:${VERSION}-${TIMESTAMP} \
|
||||
ghcr.io/${OWNER}/${REPO}:${VERSION}-${TIMESTAMP}-amd64 ghcr.io/${OWNER}/${REPO}:${VERSION}-${TIMESTAMP}-arm64
|
||||
|
||||
# also create Docker Hub manifests using the same username
|
||||
docker buildx imagetools create --tag docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${REPO}:${BRANCH_TAG} \
|
||||
docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${REPO}:${BRANCH_TAG}-amd64 docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${REPO}:${BRANCH_TAG}-arm64
|
||||
|
||||
docker buildx imagetools create --tag docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${REPO}:${VERSION}-${TIMESTAMP} \
|
||||
docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${REPO}:${VERSION}-${TIMESTAMP}-amd64 docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${REPO}:${VERSION}-${TIMESTAMP}-arm64
|
||||
|
|
|
|||
138
.github/workflows/release.yml
vendored
138
.github/workflows/release.yml
vendored
|
|
@ -15,16 +15,21 @@ on:
|
|||
|
||||
# Add explicit permissions for the workflow
|
||||
permissions:
|
||||
contents: write # For managing releases and pushing tags
|
||||
packages: write # For publishing to GitHub Container Registry
|
||||
contents: write # For managing releases and pushing tags
|
||||
packages: write # For publishing to GitHub Container Registry
|
||||
|
||||
jobs:
|
||||
release:
|
||||
runs-on: ubuntu-latest
|
||||
prepare:
|
||||
runs-on: ubuntu-24.04
|
||||
outputs:
|
||||
new_version: ${{ steps.update_version.outputs.new_version }}
|
||||
repo_owner: ${{ steps.meta.outputs.repo_owner }}
|
||||
repo_name: ${{ steps.meta.outputs.repo_name }}
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
with:
|
||||
fetch-depth: 0
|
||||
token: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
- name: Configure Git
|
||||
run: |
|
||||
|
|
@ -38,14 +43,45 @@ jobs:
|
|||
NEW_VERSION=$(python -c "import version; print(f'{version.__version__}')")
|
||||
echo "new_version=${NEW_VERSION}" >> $GITHUB_OUTPUT
|
||||
|
||||
- name: Set lowercase repo owner
|
||||
id: repo_owner
|
||||
- name: Set repository metadata
|
||||
id: meta
|
||||
run: |
|
||||
REPO_OWNER=$(echo "${{ github.repository_owner }}" | tr '[:upper:]' '[:lower:]')
|
||||
echo "lowercase=${REPO_OWNER}" >> $GITHUB_OUTPUT
|
||||
echo "repo_owner=${REPO_OWNER}" >> $GITHUB_OUTPUT
|
||||
|
||||
- name: Set up QEMU
|
||||
uses: docker/setup-qemu-action@v2
|
||||
REPO_NAME=$(echo "${{ github.repository }}" | cut -d '/' -f 2 | tr '[:upper:]' '[:lower:]')
|
||||
echo "repo_name=${REPO_NAME}" >> $GITHUB_OUTPUT
|
||||
|
||||
- name: Commit and Tag
|
||||
run: |
|
||||
git add version.py
|
||||
git commit -m "Release v${{ steps.update_version.outputs.new_version }}"
|
||||
git tag -a "v${{ steps.update_version.outputs.new_version }}" -m "Release v${{ steps.update_version.outputs.new_version }}"
|
||||
git push origin main --tags
|
||||
|
||||
docker:
|
||||
needs: [prepare]
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
platform: [amd64, arm64]
|
||||
include:
|
||||
- platform: amd64
|
||||
runner: ubuntu-24.04
|
||||
- platform: arm64
|
||||
runner: ubuntu-24.04-arm
|
||||
runs-on: ${{ matrix.runner }}
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
with:
|
||||
fetch-depth: 0
|
||||
token: ${{ secrets.GITHUB_TOKEN }}
|
||||
ref: main
|
||||
|
||||
- name: Configure Git
|
||||
run: |
|
||||
git config user.name "GitHub Actions"
|
||||
git config user.email "actions@github.com"
|
||||
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@v2
|
||||
|
|
@ -57,36 +93,88 @@ jobs:
|
|||
username: ${{ github.actor }}
|
||||
password: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
- name: Commit and Tag
|
||||
run: |
|
||||
git add version.py
|
||||
git commit -m "Release v${{ steps.update_version.outputs.new_version }}"
|
||||
git tag -a "v${{ steps.update_version.outputs.new_version }}" -m "Release v${{ steps.update_version.outputs.new_version }}"
|
||||
git push origin main --tags
|
||||
- name: Login to Docker Hub
|
||||
uses: docker/login-action@v2
|
||||
with:
|
||||
registry: docker.io
|
||||
username: ${{ secrets.DOCKERHUB_USERNAME }}
|
||||
password: ${{ secrets.DOCKERHUB_TOKEN }}
|
||||
|
||||
- name: Build and Push Release Image
|
||||
- name: Build and push Docker image
|
||||
uses: docker/build-push-action@v4
|
||||
with:
|
||||
context: .
|
||||
push: true
|
||||
platforms: linux/amd64,linux/arm64, #linux/arm/v7 # Multi-arch support for releases
|
||||
platforms: linux/${{ matrix.platform }}
|
||||
tags: |
|
||||
ghcr.io/${{ steps.repo_owner.outputs.lowercase }}/dispatcharr:latest
|
||||
ghcr.io/${{ steps.repo_owner.outputs.lowercase }}/dispatcharr:${{ steps.update_version.outputs.new_version }}
|
||||
ghcr.io/${{ steps.repo_owner.outputs.lowercase }}/dispatcharr:latest-amd64
|
||||
ghcr.io/${{ steps.repo_owner.outputs.lowercase }}/dispatcharr:latest-arm64
|
||||
ghcr.io/${{ steps.repo_owner.outputs.lowercase }}/dispatcharr:${{ steps.update_version.outputs.new_version }}-amd64
|
||||
ghcr.io/${{ steps.repo_owner.outputs.lowercase }}/dispatcharr:${{ steps.update_version.outputs.new_version }}-arm64
|
||||
ghcr.io/${{ needs.prepare.outputs.repo_owner }}/${{ needs.prepare.outputs.repo_name }}:latest-${{ matrix.platform }}
|
||||
ghcr.io/${{ needs.prepare.outputs.repo_owner }}/${{ needs.prepare.outputs.repo_name }}:${{ needs.prepare.outputs.new_version }}-${{ matrix.platform }}
|
||||
docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${{ needs.prepare.outputs.repo_name }}:latest-${{ matrix.platform }}
|
||||
docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${{ needs.prepare.outputs.repo_name }}:${{ needs.prepare.outputs.new_version }}-${{ matrix.platform }}
|
||||
build-args: |
|
||||
REPO_OWNER=${{ needs.prepare.outputs.repo_owner }}
|
||||
REPO_NAME=${{ needs.prepare.outputs.repo_name }}
|
||||
BRANCH=${{ github.ref_name }}
|
||||
REPO_URL=https://github.com/${{ github.repository }}
|
||||
file: ./docker/Dockerfile
|
||||
|
||||
create-manifest:
|
||||
needs: [prepare, docker]
|
||||
runs-on: ubuntu-24.04
|
||||
steps:
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@v2
|
||||
|
||||
- name: Login to GitHub Container Registry
|
||||
uses: docker/login-action@v2
|
||||
with:
|
||||
registry: ghcr.io
|
||||
username: ${{ github.actor }}
|
||||
password: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
- name: Login to Docker Hub
|
||||
uses: docker/login-action@v2
|
||||
with:
|
||||
registry: docker.io
|
||||
username: ${{ secrets.DOCKERHUB_USERNAME }}
|
||||
password: ${{ secrets.DOCKERHUB_TOKEN }}
|
||||
|
||||
- name: Create multi-arch manifest tags
|
||||
run: |
|
||||
set -euo pipefail
|
||||
OWNER=${{ needs.prepare.outputs.repo_owner }}
|
||||
REPO=${{ needs.prepare.outputs.repo_name }}
|
||||
VERSION=${{ needs.prepare.outputs.new_version }}
|
||||
|
||||
echo "Creating multi-arch manifest for ${OWNER}/${REPO}"
|
||||
|
||||
# GitHub Container Registry manifests
|
||||
# latest tag
|
||||
docker buildx imagetools create --tag ghcr.io/${OWNER}/${REPO}:latest \
|
||||
ghcr.io/${OWNER}/${REPO}:latest-amd64 ghcr.io/${OWNER}/${REPO}:latest-arm64
|
||||
|
||||
# version tag
|
||||
docker buildx imagetools create --tag ghcr.io/${OWNER}/${REPO}:${VERSION} \
|
||||
ghcr.io/${OWNER}/${REPO}:${VERSION}-amd64 ghcr.io/${OWNER}/${REPO}:${VERSION}-arm64
|
||||
|
||||
# Docker Hub manifests
|
||||
# latest tag
|
||||
docker buildx imagetools create --tag docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${REPO}:latest \
|
||||
docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${REPO}:latest-amd64 docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${REPO}:latest-arm64
|
||||
|
||||
# version tag
|
||||
docker buildx imagetools create --tag docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${REPO}:${VERSION} \
|
||||
docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${REPO}:${VERSION}-amd64 docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${REPO}:${VERSION}-arm64
|
||||
|
||||
create-release:
|
||||
needs: [prepare, create-manifest]
|
||||
runs-on: ubuntu-24.04
|
||||
steps:
|
||||
- name: Create GitHub Release
|
||||
uses: softprops/action-gh-release@v1
|
||||
with:
|
||||
tag_name: v${{ steps.update_version.outputs.new_version }}
|
||||
name: Release v${{ steps.update_version.outputs.new_version }}
|
||||
tag_name: v${{ needs.prepare.outputs.new_version }}
|
||||
name: Release v${{ needs.prepare.outputs.new_version }}
|
||||
draft: false
|
||||
prerelease: false
|
||||
token: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
|
|
|||
|
|
@ -13,12 +13,14 @@ from .api_views import (
|
|||
UpdateChannelMembershipAPIView,
|
||||
BulkUpdateChannelMembershipAPIView,
|
||||
RecordingViewSet,
|
||||
RecurringRecordingRuleViewSet,
|
||||
GetChannelStreamsAPIView,
|
||||
SeriesRulesAPIView,
|
||||
DeleteSeriesRuleAPIView,
|
||||
EvaluateSeriesRulesAPIView,
|
||||
BulkRemoveSeriesRecordingsAPIView,
|
||||
BulkDeleteUpcomingRecordingsAPIView,
|
||||
ComskipConfigAPIView,
|
||||
)
|
||||
|
||||
app_name = 'channels' # for DRF routing
|
||||
|
|
@ -30,6 +32,7 @@ router.register(r'channels', ChannelViewSet, basename='channel')
|
|||
router.register(r'logos', LogoViewSet, basename='logo')
|
||||
router.register(r'profiles', ChannelProfileViewSet, basename='profile')
|
||||
router.register(r'recordings', RecordingViewSet, basename='recording')
|
||||
router.register(r'recurring-rules', RecurringRecordingRuleViewSet, basename='recurring-rule')
|
||||
|
||||
urlpatterns = [
|
||||
# Bulk delete is a single APIView, not a ViewSet
|
||||
|
|
@ -46,6 +49,7 @@ urlpatterns = [
|
|||
path('series-rules/bulk-remove/', BulkRemoveSeriesRecordingsAPIView.as_view(), name='bulk_remove_series_recordings'),
|
||||
path('series-rules/<str:tvg_id>/', DeleteSeriesRuleAPIView.as_view(), name='delete_series_rule'),
|
||||
path('recordings/bulk-delete-upcoming/', BulkDeleteUpcomingRecordingsAPIView.as_view(), name='bulk_delete_upcoming_recordings'),
|
||||
path('dvr/comskip-config/', ComskipConfigAPIView.as_view(), name='comskip_config'),
|
||||
]
|
||||
|
||||
urlpatterns += router.urls
|
||||
|
|
|
|||
|
|
@ -28,6 +28,7 @@ from .models import (
|
|||
ChannelProfile,
|
||||
ChannelProfileMembership,
|
||||
Recording,
|
||||
RecurringRecordingRule,
|
||||
)
|
||||
from .serializers import (
|
||||
StreamSerializer,
|
||||
|
|
@ -38,8 +39,17 @@ from .serializers import (
|
|||
BulkChannelProfileMembershipSerializer,
|
||||
ChannelProfileSerializer,
|
||||
RecordingSerializer,
|
||||
RecurringRecordingRuleSerializer,
|
||||
)
|
||||
from .tasks import (
|
||||
match_epg_channels,
|
||||
evaluate_series_rules,
|
||||
evaluate_series_rules_impl,
|
||||
match_single_channel_epg,
|
||||
match_selected_channels_epg,
|
||||
sync_recurring_rule_impl,
|
||||
purge_recurring_rule_impl,
|
||||
)
|
||||
from .tasks import match_epg_channels, evaluate_series_rules, evaluate_series_rules_impl
|
||||
import django_filters
|
||||
from django_filters.rest_framework import DjangoFilterBackend
|
||||
from rest_framework.filters import SearchFilter, OrderingFilter
|
||||
|
|
@ -49,10 +59,12 @@ from django.db.models import Q
|
|||
from django.http import StreamingHttpResponse, FileResponse, Http404
|
||||
from django.utils import timezone
|
||||
import mimetypes
|
||||
from django.conf import settings
|
||||
|
||||
from rest_framework.pagination import PageNumberPagination
|
||||
|
||||
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
|
|
@ -493,6 +505,99 @@ class ChannelViewSet(viewsets.ModelViewSet):
|
|||
"channels": serialized_channels
|
||||
})
|
||||
|
||||
@action(detail=False, methods=["post"], url_path="set-names-from-epg")
|
||||
def set_names_from_epg(self, request):
|
||||
"""
|
||||
Trigger a Celery task to set channel names from EPG data
|
||||
"""
|
||||
from .tasks import set_channels_names_from_epg
|
||||
|
||||
data = request.data
|
||||
channel_ids = data.get("channel_ids", [])
|
||||
|
||||
if not channel_ids:
|
||||
return Response(
|
||||
{"error": "channel_ids is required"},
|
||||
status=status.HTTP_400_BAD_REQUEST,
|
||||
)
|
||||
|
||||
if not isinstance(channel_ids, list):
|
||||
return Response(
|
||||
{"error": "channel_ids must be a list"},
|
||||
status=status.HTTP_400_BAD_REQUEST,
|
||||
)
|
||||
|
||||
# Start the Celery task
|
||||
task = set_channels_names_from_epg.delay(channel_ids)
|
||||
|
||||
return Response({
|
||||
"message": f"Started EPG name setting task for {len(channel_ids)} channels",
|
||||
"task_id": task.id,
|
||||
"channel_count": len(channel_ids)
|
||||
})
|
||||
|
||||
@action(detail=False, methods=["post"], url_path="set-logos-from-epg")
|
||||
def set_logos_from_epg(self, request):
|
||||
"""
|
||||
Trigger a Celery task to set channel logos from EPG data
|
||||
"""
|
||||
from .tasks import set_channels_logos_from_epg
|
||||
|
||||
data = request.data
|
||||
channel_ids = data.get("channel_ids", [])
|
||||
|
||||
if not channel_ids:
|
||||
return Response(
|
||||
{"error": "channel_ids is required"},
|
||||
status=status.HTTP_400_BAD_REQUEST,
|
||||
)
|
||||
|
||||
if not isinstance(channel_ids, list):
|
||||
return Response(
|
||||
{"error": "channel_ids must be a list"},
|
||||
status=status.HTTP_400_BAD_REQUEST,
|
||||
)
|
||||
|
||||
# Start the Celery task
|
||||
task = set_channels_logos_from_epg.delay(channel_ids)
|
||||
|
||||
return Response({
|
||||
"message": f"Started EPG logo setting task for {len(channel_ids)} channels",
|
||||
"task_id": task.id,
|
||||
"channel_count": len(channel_ids)
|
||||
})
|
||||
|
||||
@action(detail=False, methods=["post"], url_path="set-tvg-ids-from-epg")
|
||||
def set_tvg_ids_from_epg(self, request):
|
||||
"""
|
||||
Trigger a Celery task to set channel TVG-IDs from EPG data
|
||||
"""
|
||||
from .tasks import set_channels_tvg_ids_from_epg
|
||||
|
||||
data = request.data
|
||||
channel_ids = data.get("channel_ids", [])
|
||||
|
||||
if not channel_ids:
|
||||
return Response(
|
||||
{"error": "channel_ids is required"},
|
||||
status=status.HTTP_400_BAD_REQUEST,
|
||||
)
|
||||
|
||||
if not isinstance(channel_ids, list):
|
||||
return Response(
|
||||
{"error": "channel_ids must be a list"},
|
||||
status=status.HTTP_400_BAD_REQUEST,
|
||||
)
|
||||
|
||||
# Start the Celery task
|
||||
task = set_channels_tvg_ids_from_epg.delay(channel_ids)
|
||||
|
||||
return Response({
|
||||
"message": f"Started EPG TVG-ID setting task for {len(channel_ids)} channels",
|
||||
"task_id": task.id,
|
||||
"channel_count": len(channel_ids)
|
||||
})
|
||||
|
||||
@action(detail=False, methods=["get"], url_path="ids")
|
||||
def get_ids(self, request, *args, **kwargs):
|
||||
# Get the filtered queryset
|
||||
|
|
@ -642,10 +747,14 @@ class ChannelViewSet(viewsets.ModelViewSet):
|
|||
channel_data["channel_group_id"] = channel_group.id
|
||||
|
||||
if stream.logo_url:
|
||||
logo, _ = Logo.objects.get_or_create(
|
||||
url=stream.logo_url, defaults={"name": stream.name or stream.tvg_id}
|
||||
)
|
||||
channel_data["logo_id"] = logo.id
|
||||
# Import validation function
|
||||
from apps.channels.tasks import validate_logo_url
|
||||
validated_logo_url = validate_logo_url(stream.logo_url)
|
||||
if validated_logo_url:
|
||||
logo, _ = Logo.objects.get_or_create(
|
||||
url=validated_logo_url, defaults={"name": stream.name or stream.tvg_id}
|
||||
)
|
||||
channel_data["logo_id"] = logo.id
|
||||
|
||||
# Attempt to find existing EPGs with the same tvg-id
|
||||
epgs = EPGData.objects.filter(tvg_id=stream.tvg_id)
|
||||
|
|
@ -779,16 +888,65 @@ class ChannelViewSet(viewsets.ModelViewSet):
|
|||
# ─────────────────────────────────────────────────────────
|
||||
@swagger_auto_schema(
|
||||
method="post",
|
||||
operation_description="Kick off a Celery task that tries to fuzzy-match channels with EPG data.",
|
||||
operation_description="Kick off a Celery task that tries to fuzzy-match channels with EPG data. If channel_ids are provided, only those channels will be processed.",
|
||||
request_body=openapi.Schema(
|
||||
type=openapi.TYPE_OBJECT,
|
||||
properties={
|
||||
'channel_ids': openapi.Schema(
|
||||
type=openapi.TYPE_ARRAY,
|
||||
items=openapi.Schema(type=openapi.TYPE_INTEGER),
|
||||
description='List of channel IDs to process. If empty or not provided, all channels without EPG will be processed.'
|
||||
)
|
||||
}
|
||||
),
|
||||
responses={202: "EPG matching task initiated"},
|
||||
)
|
||||
@action(detail=False, methods=["post"], url_path="match-epg")
|
||||
def match_epg(self, request):
|
||||
match_epg_channels.delay()
|
||||
# Get channel IDs from request body if provided
|
||||
channel_ids = request.data.get('channel_ids', [])
|
||||
|
||||
if channel_ids:
|
||||
# Process only selected channels
|
||||
from .tasks import match_selected_channels_epg
|
||||
match_selected_channels_epg.delay(channel_ids)
|
||||
message = f"EPG matching task initiated for {len(channel_ids)} selected channel(s)."
|
||||
else:
|
||||
# Process all channels without EPG (original behavior)
|
||||
match_epg_channels.delay()
|
||||
message = "EPG matching task initiated for all channels without EPG."
|
||||
|
||||
return Response(
|
||||
{"message": "EPG matching task initiated."}, status=status.HTTP_202_ACCEPTED
|
||||
{"message": message}, status=status.HTTP_202_ACCEPTED
|
||||
)
|
||||
|
||||
@swagger_auto_schema(
|
||||
method="post",
|
||||
operation_description="Try to auto-match this specific channel with EPG data.",
|
||||
responses={200: "EPG matching completed", 202: "EPG matching task initiated"},
|
||||
)
|
||||
@action(detail=True, methods=["post"], url_path="match-epg")
|
||||
def match_channel_epg(self, request, pk=None):
|
||||
channel = self.get_object()
|
||||
|
||||
# Import the matching logic
|
||||
from apps.channels.tasks import match_single_channel_epg
|
||||
|
||||
try:
|
||||
# Try to match this specific channel - call synchronously for immediate response
|
||||
result = match_single_channel_epg.apply_async(args=[channel.id]).get(timeout=30)
|
||||
|
||||
# Refresh the channel from DB to get any updates
|
||||
channel.refresh_from_db()
|
||||
|
||||
return Response({
|
||||
"message": result.get("message", "Channel matching completed"),
|
||||
"matched": result.get("matched", False),
|
||||
"channel": self.get_serializer(channel).data
|
||||
})
|
||||
except Exception as e:
|
||||
return Response({"error": str(e)}, status=400)
|
||||
|
||||
# ─────────────────────────────────────────────────────────
|
||||
# 7) Set EPG and Refresh
|
||||
# ─────────────────────────────────────────────────────────
|
||||
|
|
@ -1542,6 +1700,41 @@ class BulkUpdateChannelMembershipAPIView(APIView):
|
|||
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
|
||||
|
||||
|
||||
class RecurringRecordingRuleViewSet(viewsets.ModelViewSet):
|
||||
queryset = RecurringRecordingRule.objects.all().select_related("channel")
|
||||
serializer_class = RecurringRecordingRuleSerializer
|
||||
|
||||
def get_permissions(self):
|
||||
return [IsAdmin()]
|
||||
|
||||
def perform_create(self, serializer):
|
||||
rule = serializer.save()
|
||||
try:
|
||||
sync_recurring_rule_impl(rule.id, drop_existing=True)
|
||||
except Exception as err:
|
||||
logger.warning(f"Failed to initialize recurring rule {rule.id}: {err}")
|
||||
return rule
|
||||
|
||||
def perform_update(self, serializer):
|
||||
rule = serializer.save()
|
||||
try:
|
||||
if rule.enabled:
|
||||
sync_recurring_rule_impl(rule.id, drop_existing=True)
|
||||
else:
|
||||
purge_recurring_rule_impl(rule.id)
|
||||
except Exception as err:
|
||||
logger.warning(f"Failed to resync recurring rule {rule.id}: {err}")
|
||||
return rule
|
||||
|
||||
def perform_destroy(self, instance):
|
||||
rule_id = instance.id
|
||||
super().perform_destroy(instance)
|
||||
try:
|
||||
purge_recurring_rule_impl(rule_id)
|
||||
except Exception as err:
|
||||
logger.warning(f"Failed to purge recordings for rule {rule_id}: {err}")
|
||||
|
||||
|
||||
class RecordingViewSet(viewsets.ModelViewSet):
|
||||
queryset = Recording.objects.all()
|
||||
serializer_class = RecordingSerializer
|
||||
|
|
@ -1721,6 +1914,49 @@ class RecordingViewSet(viewsets.ModelViewSet):
|
|||
return response
|
||||
|
||||
|
||||
class ComskipConfigAPIView(APIView):
|
||||
"""Upload or inspect the custom comskip.ini used by DVR processing."""
|
||||
|
||||
parser_classes = [MultiPartParser, FormParser]
|
||||
|
||||
def get_permissions(self):
|
||||
return [IsAdmin()]
|
||||
|
||||
def get(self, request):
|
||||
path = CoreSettings.get_dvr_comskip_custom_path()
|
||||
exists = bool(path and os.path.exists(path))
|
||||
return Response({"path": path, "exists": exists})
|
||||
|
||||
def post(self, request):
|
||||
uploaded = request.FILES.get("file") or request.FILES.get("comskip_ini")
|
||||
if not uploaded:
|
||||
return Response({"error": "No file provided"}, status=status.HTTP_400_BAD_REQUEST)
|
||||
|
||||
name = (uploaded.name or "").lower()
|
||||
if not name.endswith(".ini"):
|
||||
return Response({"error": "Only .ini files are allowed"}, status=status.HTTP_400_BAD_REQUEST)
|
||||
|
||||
if uploaded.size and uploaded.size > 1024 * 1024:
|
||||
return Response({"error": "File too large (limit 1MB)"}, status=status.HTTP_400_BAD_REQUEST)
|
||||
|
||||
dest_dir = os.path.join(settings.MEDIA_ROOT, "comskip")
|
||||
os.makedirs(dest_dir, exist_ok=True)
|
||||
dest_path = os.path.join(dest_dir, "comskip.ini")
|
||||
|
||||
try:
|
||||
with open(dest_path, "wb") as dest:
|
||||
for chunk in uploaded.chunks():
|
||||
dest.write(chunk)
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to save uploaded comskip.ini: {e}")
|
||||
return Response({"error": "Unable to save file"}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
|
||||
|
||||
# Persist path setting so DVR processing picks it up immediately
|
||||
CoreSettings.set_dvr_comskip_custom_path(dest_path)
|
||||
|
||||
return Response({"success": True, "path": dest_path, "exists": os.path.exists(dest_path)})
|
||||
|
||||
|
||||
class BulkDeleteUpcomingRecordingsAPIView(APIView):
|
||||
"""Delete all upcoming (future) recordings."""
|
||||
def get_permissions(self):
|
||||
|
|
|
|||
31
apps/channels/migrations/0026_recurringrecordingrule.py
Normal file
31
apps/channels/migrations/0026_recurringrecordingrule.py
Normal file
|
|
@ -0,0 +1,31 @@
|
|||
# Generated by Django 5.0.14 on 2025-09-18 14:56
|
||||
|
||||
import django.db.models.deletion
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('dispatcharr_channels', '0025_alter_channelgroupm3uaccount_custom_properties_and_more'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.CreateModel(
|
||||
name='RecurringRecordingRule',
|
||||
fields=[
|
||||
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
|
||||
('days_of_week', models.JSONField(default=list)),
|
||||
('start_time', models.TimeField()),
|
||||
('end_time', models.TimeField()),
|
||||
('enabled', models.BooleanField(default=True)),
|
||||
('name', models.CharField(blank=True, max_length=255)),
|
||||
('created_at', models.DateTimeField(auto_now_add=True)),
|
||||
('updated_at', models.DateTimeField(auto_now=True)),
|
||||
('channel', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='recurring_rules', to='dispatcharr_channels.channel')),
|
||||
],
|
||||
options={
|
||||
'ordering': ['channel', 'start_time'],
|
||||
},
|
||||
),
|
||||
]
|
||||
|
|
@ -0,0 +1,23 @@
|
|||
# Generated by Django 5.2.4 on 2025-10-05 20:50
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('dispatcharr_channels', '0026_recurringrecordingrule'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name='recurringrecordingrule',
|
||||
name='end_date',
|
||||
field=models.DateField(blank=True, null=True),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='recurringrecordingrule',
|
||||
name='start_date',
|
||||
field=models.DateField(blank=True, null=True),
|
||||
),
|
||||
]
|
||||
|
|
@ -0,0 +1,25 @@
|
|||
# Generated by Django 5.2.4 on 2025-10-06 22:55
|
||||
|
||||
import django.utils.timezone
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('dispatcharr_channels', '0027_recurringrecordingrule_end_date_and_more'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name='channel',
|
||||
name='created_at',
|
||||
field=models.DateTimeField(auto_now_add=True, default=django.utils.timezone.now, help_text='Timestamp when this channel was created'),
|
||||
preserve_default=False,
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='channel',
|
||||
name='updated_at',
|
||||
field=models.DateTimeField(auto_now=True, help_text='Timestamp when this channel was last updated'),
|
||||
),
|
||||
]
|
||||
|
|
@ -119,11 +119,11 @@ class Stream(models.Model):
|
|||
return self.name or self.url or f"Stream ID {self.id}"
|
||||
|
||||
@classmethod
|
||||
def generate_hash_key(cls, name, url, tvg_id, keys=None):
|
||||
def generate_hash_key(cls, name, url, tvg_id, keys=None, m3u_id=None):
|
||||
if keys is None:
|
||||
keys = CoreSettings.get_m3u_hash_key().split(",")
|
||||
|
||||
stream_parts = {"name": name, "url": url, "tvg_id": tvg_id}
|
||||
stream_parts = {"name": name, "url": url, "tvg_id": tvg_id, "m3u_id": m3u_id}
|
||||
|
||||
hash_parts = {key: stream_parts[key] for key in keys if key in stream_parts}
|
||||
|
||||
|
|
@ -303,6 +303,15 @@ class Channel(models.Model):
|
|||
help_text="The M3U account that auto-created this channel"
|
||||
)
|
||||
|
||||
created_at = models.DateTimeField(
|
||||
auto_now_add=True,
|
||||
help_text="Timestamp when this channel was created"
|
||||
)
|
||||
updated_at = models.DateTimeField(
|
||||
auto_now=True,
|
||||
help_text="Timestamp when this channel was last updated"
|
||||
)
|
||||
|
||||
def clean(self):
|
||||
# Enforce unique channel_number within a given group
|
||||
existing = Channel.objects.filter(
|
||||
|
|
@ -601,3 +610,35 @@ class Recording(models.Model):
|
|||
|
||||
def __str__(self):
|
||||
return f"{self.channel.name} - {self.start_time} to {self.end_time}"
|
||||
|
||||
|
||||
class RecurringRecordingRule(models.Model):
|
||||
"""Rule describing a recurring manual DVR schedule."""
|
||||
|
||||
channel = models.ForeignKey(
|
||||
"Channel",
|
||||
on_delete=models.CASCADE,
|
||||
related_name="recurring_rules",
|
||||
)
|
||||
days_of_week = models.JSONField(default=list)
|
||||
start_time = models.TimeField()
|
||||
end_time = models.TimeField()
|
||||
enabled = models.BooleanField(default=True)
|
||||
name = models.CharField(max_length=255, blank=True)
|
||||
start_date = models.DateField(null=True, blank=True)
|
||||
end_date = models.DateField(null=True, blank=True)
|
||||
created_at = models.DateTimeField(auto_now_add=True)
|
||||
updated_at = models.DateTimeField(auto_now=True)
|
||||
|
||||
class Meta:
|
||||
ordering = ["channel", "start_time"]
|
||||
|
||||
def __str__(self):
|
||||
channel_name = getattr(self.channel, "name", str(self.channel_id))
|
||||
return f"Recurring rule for {channel_name}"
|
||||
|
||||
def cleaned_days(self):
|
||||
try:
|
||||
return sorted({int(d) for d in (self.days_of_week or []) if 0 <= int(d) <= 6})
|
||||
except Exception:
|
||||
return []
|
||||
|
|
|
|||
|
|
@ -1,4 +1,6 @@
|
|||
import json
|
||||
from datetime import datetime
|
||||
|
||||
from rest_framework import serializers
|
||||
from .models import (
|
||||
Stream,
|
||||
|
|
@ -10,6 +12,7 @@ from .models import (
|
|||
ChannelProfile,
|
||||
ChannelProfileMembership,
|
||||
Recording,
|
||||
RecurringRecordingRule,
|
||||
)
|
||||
from apps.epg.serializers import EPGDataSerializer
|
||||
from core.models import StreamProfile
|
||||
|
|
@ -454,6 +457,13 @@ class RecordingSerializer(serializers.ModelSerializer):
|
|||
start_time = data.get("start_time")
|
||||
end_time = data.get("end_time")
|
||||
|
||||
if start_time and timezone.is_naive(start_time):
|
||||
start_time = timezone.make_aware(start_time, timezone.get_current_timezone())
|
||||
data["start_time"] = start_time
|
||||
if end_time and timezone.is_naive(end_time):
|
||||
end_time = timezone.make_aware(end_time, timezone.get_current_timezone())
|
||||
data["end_time"] = end_time
|
||||
|
||||
# If this is an EPG-based recording (program provided), apply global pre/post offsets
|
||||
try:
|
||||
cp = data.get("custom_properties") or {}
|
||||
|
|
@ -497,3 +507,56 @@ class RecordingSerializer(serializers.ModelSerializer):
|
|||
raise serializers.ValidationError("End time must be after start time.")
|
||||
|
||||
return data
|
||||
|
||||
|
||||
class RecurringRecordingRuleSerializer(serializers.ModelSerializer):
|
||||
class Meta:
|
||||
model = RecurringRecordingRule
|
||||
fields = "__all__"
|
||||
read_only_fields = ["created_at", "updated_at"]
|
||||
|
||||
def validate_days_of_week(self, value):
|
||||
if not value:
|
||||
raise serializers.ValidationError("Select at least one day of the week")
|
||||
cleaned = []
|
||||
for entry in value:
|
||||
try:
|
||||
iv = int(entry)
|
||||
except (TypeError, ValueError):
|
||||
raise serializers.ValidationError("Days of week must be integers 0-6")
|
||||
if iv < 0 or iv > 6:
|
||||
raise serializers.ValidationError("Days of week must be between 0 (Monday) and 6 (Sunday)")
|
||||
cleaned.append(iv)
|
||||
return sorted(set(cleaned))
|
||||
|
||||
def validate(self, attrs):
|
||||
start = attrs.get("start_time") or getattr(self.instance, "start_time", None)
|
||||
end = attrs.get("end_time") or getattr(self.instance, "end_time", None)
|
||||
start_date = attrs.get("start_date") if "start_date" in attrs else getattr(self.instance, "start_date", None)
|
||||
end_date = attrs.get("end_date") if "end_date" in attrs else getattr(self.instance, "end_date", None)
|
||||
if start_date is None:
|
||||
existing_start = getattr(self.instance, "start_date", None)
|
||||
if existing_start is None:
|
||||
raise serializers.ValidationError("Start date is required")
|
||||
if start_date and end_date and end_date < start_date:
|
||||
raise serializers.ValidationError("End date must be on or after start date")
|
||||
if end_date is None:
|
||||
existing_end = getattr(self.instance, "end_date", None)
|
||||
if existing_end is None:
|
||||
raise serializers.ValidationError("End date is required")
|
||||
if start and end and start_date and end_date:
|
||||
start_dt = datetime.combine(start_date, start)
|
||||
end_dt = datetime.combine(end_date, end)
|
||||
if end_dt <= start_dt:
|
||||
raise serializers.ValidationError("End datetime must be after start datetime")
|
||||
elif start and end and end == start:
|
||||
raise serializers.ValidationError("End time must be different from start time")
|
||||
# Normalize empty strings to None for dates
|
||||
if attrs.get("end_date") == "":
|
||||
attrs["end_date"] = None
|
||||
if attrs.get("start_date") == "":
|
||||
attrs["start_date"] = None
|
||||
return super().validate(attrs)
|
||||
|
||||
def create(self, validated_data):
|
||||
return super().create(validated_data)
|
||||
|
|
|
|||
File diff suppressed because it is too large
Load diff
0
apps/channels/tests/__init__.py
Normal file
0
apps/channels/tests/__init__.py
Normal file
40
apps/channels/tests/test_recurring_rules.py
Normal file
40
apps/channels/tests/test_recurring_rules.py
Normal file
|
|
@ -0,0 +1,40 @@
|
|||
from datetime import datetime, timedelta
|
||||
from django.test import TestCase
|
||||
from django.utils import timezone
|
||||
|
||||
from apps.channels.models import Channel, RecurringRecordingRule, Recording
|
||||
from apps.channels.tasks import sync_recurring_rule_impl, purge_recurring_rule_impl
|
||||
|
||||
|
||||
class RecurringRecordingRuleTasksTests(TestCase):
|
||||
def test_sync_recurring_rule_creates_and_purges_recordings(self):
|
||||
now = timezone.now()
|
||||
channel = Channel.objects.create(channel_number=1, name='Test Channel')
|
||||
|
||||
start_time = (now + timedelta(minutes=15)).time().replace(second=0, microsecond=0)
|
||||
end_time = (now + timedelta(minutes=75)).time().replace(second=0, microsecond=0)
|
||||
|
||||
rule = RecurringRecordingRule.objects.create(
|
||||
channel=channel,
|
||||
days_of_week=[now.weekday()],
|
||||
start_time=start_time,
|
||||
end_time=end_time,
|
||||
)
|
||||
|
||||
created = sync_recurring_rule_impl(rule.id, drop_existing=True, horizon_days=1)
|
||||
self.assertEqual(created, 1)
|
||||
|
||||
recording = Recording.objects.filter(custom_properties__rule__id=rule.id).first()
|
||||
self.assertIsNotNone(recording)
|
||||
self.assertEqual(recording.channel, channel)
|
||||
self.assertEqual(recording.custom_properties.get('rule', {}).get('id'), rule.id)
|
||||
|
||||
expected_start = timezone.make_aware(
|
||||
datetime.combine(recording.start_time.date(), start_time),
|
||||
timezone.get_current_timezone(),
|
||||
)
|
||||
self.assertLess(abs((recording.start_time - expected_start).total_seconds()), 60)
|
||||
|
||||
removed = purge_recurring_rule_impl(rule.id)
|
||||
self.assertEqual(removed, 1)
|
||||
self.assertFalse(Recording.objects.filter(custom_properties__rule__id=rule.id).exists())
|
||||
18
apps/epg/migrations/0016_epgdata_icon_url.py
Normal file
18
apps/epg/migrations/0016_epgdata_icon_url.py
Normal file
|
|
@ -0,0 +1,18 @@
|
|||
# Generated by Django 5.2.4 on 2025-09-16 22:01
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('epg', '0015_alter_programdata_custom_properties'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name='epgdata',
|
||||
name='icon_url',
|
||||
field=models.URLField(blank=True, max_length=500, null=True),
|
||||
),
|
||||
]
|
||||
18
apps/epg/migrations/0017_alter_epgsource_url.py
Normal file
18
apps/epg/migrations/0017_alter_epgsource_url.py
Normal file
|
|
@ -0,0 +1,18 @@
|
|||
# Generated by Django 5.2.4 on 2025-09-24 21:07
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('epg', '0016_epgdata_icon_url'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterField(
|
||||
model_name='epgsource',
|
||||
name='url',
|
||||
field=models.URLField(blank=True, max_length=1000, null=True),
|
||||
),
|
||||
]
|
||||
|
|
@ -28,7 +28,7 @@ class EPGSource(models.Model):
|
|||
|
||||
name = models.CharField(max_length=255, unique=True)
|
||||
source_type = models.CharField(max_length=20, choices=SOURCE_TYPE_CHOICES)
|
||||
url = models.URLField(blank=True, null=True) # For XMLTV
|
||||
url = models.URLField(max_length=1000, blank=True, null=True) # For XMLTV
|
||||
api_key = models.CharField(max_length=255, blank=True, null=True) # For Schedules Direct
|
||||
is_active = models.BooleanField(default=True)
|
||||
file_path = models.CharField(max_length=1024, blank=True, null=True)
|
||||
|
|
@ -127,6 +127,7 @@ class EPGData(models.Model):
|
|||
# and a name (which might simply be the tvg_id if no real channel exists).
|
||||
tvg_id = models.CharField(max_length=255, null=True, blank=True, db_index=True)
|
||||
name = models.CharField(max_length=255)
|
||||
icon_url = models.URLField(max_length=500, null=True, blank=True)
|
||||
epg_source = models.ForeignKey(
|
||||
EPGSource,
|
||||
on_delete=models.CASCADE,
|
||||
|
|
|
|||
|
|
@ -52,5 +52,6 @@ class EPGDataSerializer(serializers.ModelSerializer):
|
|||
'id',
|
||||
'tvg_id',
|
||||
'name',
|
||||
'icon_url',
|
||||
'epg_source',
|
||||
]
|
||||
|
|
|
|||
|
|
@ -28,6 +28,23 @@ from core.utils import acquire_task_lock, release_task_lock, send_websocket_upda
|
|||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def validate_icon_url_fast(icon_url, max_length=None):
|
||||
"""
|
||||
Fast validation for icon URLs during parsing.
|
||||
Returns None if URL is too long, original URL otherwise.
|
||||
If max_length is None, gets it dynamically from the EPGData model field.
|
||||
"""
|
||||
if max_length is None:
|
||||
# Get max_length dynamically from the model field
|
||||
max_length = EPGData._meta.get_field('icon_url').max_length
|
||||
|
||||
if icon_url and len(icon_url) > max_length:
|
||||
logger.warning(f"Icon URL too long ({len(icon_url)} > {max_length}), skipping: {icon_url[:100]}...")
|
||||
return None
|
||||
return icon_url
|
||||
|
||||
|
||||
MAX_EXTRACT_CHUNK_SIZE = 65536 # 64kb (base2)
|
||||
|
||||
|
||||
|
|
@ -831,6 +848,7 @@ def parse_channels_only(source):
|
|||
processed_channels = 0
|
||||
batch_size = 500 # Process in batches to limit memory usage
|
||||
progress = 0 # Initialize progress variable here
|
||||
icon_url_max_length = EPGData._meta.get_field('icon_url').max_length # Get max length for icon_url field
|
||||
|
||||
# Track memory at key points
|
||||
if process:
|
||||
|
|
@ -859,7 +877,7 @@ def parse_channels_only(source):
|
|||
|
||||
# Change iterparse to look for both channel and programme elements
|
||||
logger.debug(f"Creating iterparse context for channels and programmes")
|
||||
channel_parser = etree.iterparse(source_file, events=('end',), tag=('channel', 'programme'), remove_blank_text=True)
|
||||
channel_parser = etree.iterparse(source_file, events=('end',), tag=('channel', 'programme'), remove_blank_text=True, recover=True)
|
||||
if process:
|
||||
logger.debug(f"[parse_channels_only] Memory after creating iterparse: {process.memory_info().rss / 1024 / 1024:.2f} MB")
|
||||
|
||||
|
|
@ -873,10 +891,15 @@ def parse_channels_only(source):
|
|||
tvg_id = elem.get('id', '').strip()
|
||||
if tvg_id:
|
||||
display_name = None
|
||||
icon_url = None
|
||||
for child in elem:
|
||||
if child.tag == 'display-name' and child.text:
|
||||
if display_name is None and child.tag == 'display-name' and child.text:
|
||||
display_name = child.text.strip()
|
||||
break
|
||||
elif child.tag == 'icon':
|
||||
raw_icon_url = child.get('src', '').strip()
|
||||
icon_url = validate_icon_url_fast(raw_icon_url, icon_url_max_length)
|
||||
if display_name and icon_url:
|
||||
break # No need to continue if we have both
|
||||
|
||||
if not display_name:
|
||||
display_name = tvg_id
|
||||
|
|
@ -894,17 +917,24 @@ def parse_channels_only(source):
|
|||
epgs_to_create.append(EPGData(
|
||||
tvg_id=tvg_id,
|
||||
name=display_name,
|
||||
icon_url=icon_url,
|
||||
epg_source=source,
|
||||
))
|
||||
logger.debug(f"[parse_channels_only] Added new channel to epgs_to_create 1: {tvg_id} - {display_name}")
|
||||
processed_channels += 1
|
||||
continue
|
||||
|
||||
# We use the cached object to check if the name has changed
|
||||
# We use the cached object to check if the name or icon_url has changed
|
||||
epg_obj = existing_epgs[tvg_id]
|
||||
needs_update = False
|
||||
if epg_obj.name != display_name:
|
||||
# Only update if the name actually changed
|
||||
epg_obj.name = display_name
|
||||
needs_update = True
|
||||
if epg_obj.icon_url != icon_url:
|
||||
epg_obj.icon_url = icon_url
|
||||
needs_update = True
|
||||
|
||||
if needs_update:
|
||||
epgs_to_update.append(epg_obj)
|
||||
logger.debug(f"[parse_channels_only] Added channel to update to epgs_to_update: {tvg_id} - {display_name}")
|
||||
else:
|
||||
|
|
@ -915,6 +945,7 @@ def parse_channels_only(source):
|
|||
epgs_to_create.append(EPGData(
|
||||
tvg_id=tvg_id,
|
||||
name=display_name,
|
||||
icon_url=icon_url,
|
||||
epg_source=source,
|
||||
))
|
||||
logger.debug(f"[parse_channels_only] Added new channel to epgs_to_create 2: {tvg_id} - {display_name}")
|
||||
|
|
@ -937,7 +968,7 @@ def parse_channels_only(source):
|
|||
logger.info(f"[parse_channels_only] Bulk updating {len(epgs_to_update)} EPG entries")
|
||||
if process:
|
||||
logger.info(f"[parse_channels_only] Memory before bulk_update: {process.memory_info().rss / 1024 / 1024:.2f} MB")
|
||||
EPGData.objects.bulk_update(epgs_to_update, ["name"])
|
||||
EPGData.objects.bulk_update(epgs_to_update, ["name", "icon_url"])
|
||||
if process:
|
||||
logger.info(f"[parse_channels_only] Memory after bulk_update: {process.memory_info().rss / 1024 / 1024:.2f} MB")
|
||||
epgs_to_update = []
|
||||
|
|
@ -1004,7 +1035,7 @@ def parse_channels_only(source):
|
|||
logger.debug(f"[parse_channels_only] Created final batch of {len(epgs_to_create)} EPG entries")
|
||||
|
||||
if epgs_to_update:
|
||||
EPGData.objects.bulk_update(epgs_to_update, ["name"])
|
||||
EPGData.objects.bulk_update(epgs_to_update, ["name", "icon_url"])
|
||||
logger.debug(f"[parse_channels_only] Updated final batch of {len(epgs_to_update)} EPG entries")
|
||||
if process:
|
||||
logger.debug(f"[parse_channels_only] Memory after final batch creation: {process.memory_info().rss / 1024 / 1024:.2f} MB")
|
||||
|
|
@@ -1211,7 +1242,7 @@ def parse_programs_for_tvg_id(epg_id):
|
|||
source_file = open(file_path, 'rb')
|
||||
|
||||
# Stream parse the file using lxml's iterparse
|
||||
program_parser = etree.iterparse(source_file, events=('end',), tag='programme', remove_blank_text=True)
|
||||
program_parser = etree.iterparse(source_file, events=('end',), tag='programme', remove_blank_text=True, recover=True)
|
||||
|
||||
for _, elem in program_parser:
|
||||
if elem.get('channel') == epg.tvg_id:
|
||||
|
|
|
|||
|
|
@@ -81,6 +81,13 @@ class M3UAccountViewSet(viewsets.ModelViewSet):
|
|||
account_type = response.data.get("account_type")
|
||||
account_id = response.data.get("id")
|
||||
|
||||
# Notify frontend that a new playlist was created
|
||||
from core.utils import send_websocket_update
|
||||
send_websocket_update('updates', 'update', {
|
||||
'type': 'playlist_created',
|
||||
'playlist_id': account_id
|
||||
})
|
||||
|
||||
if account_type == M3UAccount.Types.XC:
|
||||
refresh_m3u_groups(account_id)
|
||||
|
||||
|
|
|
|||
|
|
@@ -774,7 +774,7 @@ def process_xc_category_direct(account_id, batch, groups, hash_keys):
|
|||
group_title = group_name
|
||||
|
||||
stream_hash = Stream.generate_hash_key(
|
||||
name, url, tvg_id, hash_keys
|
||||
name, url, tvg_id, hash_keys, m3u_id=account_id
|
||||
)
|
||||
stream_props = {
|
||||
"name": name,
|
||||
|
|
@@ -903,6 +903,8 @@ def process_m3u_batch_direct(account_id, batch, groups, hash_keys):
|
|||
stream_hashes = {}
|
||||
|
||||
logger.debug(f"Processing batch of {len(batch)} for M3U account {account_id}")
|
||||
if compiled_filters:
|
||||
logger.debug(f"Using compiled filters: {[f[1].regex_pattern for f in compiled_filters]}")
|
||||
for stream_info in batch:
|
||||
try:
|
||||
name, url = stream_info["name"], stream_info["url"]
|
||||
|
|
@@ -912,10 +914,10 @@ def process_m3u_batch_direct(account_id, batch, groups, hash_keys):
|
|||
group_title = get_case_insensitive_attr(
|
||||
stream_info["attributes"], "group-title", "Default Group"
|
||||
)
|
||||
|
||||
logger.debug(f"Processing stream: {name} - {url} in group {group_title}")
|
||||
include = True
|
||||
for pattern, filter in compiled_filters:
|
||||
logger.debug(f"Checking filter patterh {pattern}")
|
||||
logger.trace(f"Checking filter pattern {pattern}")
|
||||
target = name
|
||||
if filter.filter_type == "url":
|
||||
target = url
|
||||
|
|
@@ -940,7 +942,7 @@ def process_m3u_batch_direct(account_id, batch, groups, hash_keys):
|
|||
)
|
||||
continue
|
||||
|
||||
stream_hash = Stream.generate_hash_key(name, url, tvg_id, hash_keys)
|
||||
stream_hash = Stream.generate_hash_key(name, url, tvg_id, hash_keys, m3u_id=account_id)
|
||||
stream_props = {
|
||||
"name": name,
|
||||
"url": url,
|
||||
|
|
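Illustrative only: the project's `Stream.generate_hash_key` is not shown in this diff, so the sketch below merely demonstrates the idea of folding the owning account id (`m3u_id`) into the digest so identical name/url/tvg-id entries from different providers stop colliding. Field handling and the digest choice are assumptions.

```python
# Illustrative only -- not the project's actual Stream.generate_hash_key.
import hashlib

def stream_hash_key(name, url, tvg_id, hash_keys, m3u_id=None):
    fields = {"name": name, "url": url, "tvg_id": tvg_id}
    material = "|".join(str(fields.get(key, "")) for key in hash_keys)
    if m3u_id is not None:
        # Folding the account id in keeps identical entries from different
        # providers from hashing to the same stream.
        material += f"|m3u:{m3u_id}"
    return hashlib.sha256(material.encode("utf-8")).hexdigest()
```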
@@ -2071,13 +2073,13 @@ def get_transformed_credentials(account, profile=None):
|
|||
base_url = account.server_url
|
||||
base_username = account.username
|
||||
base_password = account.password
# Build a complete URL with credentials (similar to how IPTV URLs are structured)
|
||||
# Format: http://server.com:port/username/password/rest_of_path
|
||||
# Format: http://server.com:port/live/username/password/1234.ts
|
||||
if base_url and base_username and base_password:
|
||||
# Remove trailing slash from server URL if present
|
||||
clean_server_url = base_url.rstrip('/')
|
||||
|
||||
# Build the complete URL with embedded credentials
|
||||
complete_url = f"{clean_server_url}/{base_username}/{base_password}/"
|
||||
complete_url = f"{clean_server_url}/live/{base_username}/{base_password}/1234.ts"
|
||||
logger.debug(f"Built complete URL: {complete_url}")
|
||||
|
||||
# Apply profile-specific transformations if profile is provided
|
||||
|
|
@@ -2091,14 +2093,14 @@ def get_transformed_credentials(account, profile=None):
|
|||
logger.info(f"Transformed complete URL: {complete_url} -> {transformed_complete_url}")
|
||||
|
||||
# Extract components from the transformed URL
|
||||
# Pattern: http://server.com:port/username/password/
|
||||
# Pattern: http://server.com:port/live/username/password/1234.ts
|
||||
parsed_url = urllib.parse.urlparse(transformed_complete_url)
|
||||
path_parts = [part for part in parsed_url.path.split('/') if part]
|
||||
|
||||
if len(path_parts) >= 2:
|
||||
# Extract username and password from path
|
||||
transformed_username = path_parts[0]
|
||||
transformed_password = path_parts[1]
|
||||
transformed_username = path_parts[1]
|
||||
transformed_password = path_parts[2]
|
||||
|
||||
# Rebuild server URL without the username/password path
|
||||
transformed_url = f"{parsed_url.scheme}://{parsed_url.netloc}"
|
||||
|
|
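A sketch of the URL round-trip this hunk adjusts, assuming the XC-style layout `http://host:port/live/<user>/<pass>/1234.ts`: after the profile transformation is applied to the complete URL, the username and password are recovered from path segments 1 and 2 rather than 0 and 1.

```python
# Sketch of the credential round-trip under the assumed XC-style URL layout.
import urllib.parse

def split_xc_credentials(transformed_url):
    parsed = urllib.parse.urlparse(transformed_url)
    parts = [part for part in parsed.path.split("/") if part]
    # parts looks like ["live", "<user>", "<pass>", "1234.ts"], so the
    # credentials now sit at indexes 1 and 2 rather than 0 and 1.
    if len(parts) < 3:
        raise ValueError("URL does not contain /live/<user>/<pass>/ segments")
    username, password = parts[1], parts[2]
    server_url = f"{parsed.scheme}://{parsed.netloc}"
    return server_url, username, password
```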
@@ -2521,76 +2523,75 @@ def refresh_single_m3u_account(account_id):
|
|||
|
||||
if not all_xc_streams:
|
||||
logger.warning("No streams collected from XC groups")
|
||||
return f"No streams found for XC account {account_id}", None
|
||||
else:
|
||||
# Now batch by stream count (like standard M3U processing)
|
||||
batches = [
|
||||
all_xc_streams[i : i + BATCH_SIZE]
|
||||
for i in range(0, len(all_xc_streams), BATCH_SIZE)
|
||||
]
|
||||
|
||||
# Now batch by stream count (like standard M3U processing)
|
||||
batches = [
|
||||
all_xc_streams[i : i + BATCH_SIZE]
|
||||
for i in range(0, len(all_xc_streams), BATCH_SIZE)
|
||||
]
|
||||
logger.info(f"Processing {len(all_xc_streams)} XC streams in {len(batches)} batches")
|
||||
|
||||
logger.info(f"Processing {len(all_xc_streams)} XC streams in {len(batches)} batches")
|
||||
# Use threading for XC stream processing - now with consistent batch sizes
|
||||
max_workers = min(4, len(batches))
|
||||
logger.debug(f"Using {max_workers} threads for XC stream processing")
|
||||
|
||||
# Use threading for XC stream processing - now with consistent batch sizes
|
||||
max_workers = min(4, len(batches))
|
||||
logger.debug(f"Using {max_workers} threads for XC stream processing")
|
||||
with ThreadPoolExecutor(max_workers=max_workers) as executor:
|
||||
# Submit stream batch processing tasks (reuse standard M3U processing)
|
||||
future_to_batch = {
|
||||
executor.submit(process_m3u_batch_direct, account_id, batch, existing_groups, hash_keys): i
|
||||
for i, batch in enumerate(batches)
|
||||
}
|
||||
|
||||
with ThreadPoolExecutor(max_workers=max_workers) as executor:
|
||||
# Submit stream batch processing tasks (reuse standard M3U processing)
|
||||
future_to_batch = {
|
||||
executor.submit(process_m3u_batch_direct, account_id, batch, existing_groups, hash_keys): i
|
||||
for i, batch in enumerate(batches)
|
||||
}
|
||||
completed_batches = 0
|
||||
total_batches = len(batches)
|
||||
|
||||
completed_batches = 0
|
||||
total_batches = len(batches)
|
||||
# Process completed batches as they finish
|
||||
for future in as_completed(future_to_batch):
|
||||
batch_idx = future_to_batch[future]
|
||||
try:
|
||||
result = future.result()
|
||||
completed_batches += 1
|
||||
|
||||
# Process completed batches as they finish
|
||||
for future in as_completed(future_to_batch):
|
||||
batch_idx = future_to_batch[future]
|
||||
try:
|
||||
result = future.result()
|
||||
completed_batches += 1
|
||||
# Extract stream counts from result
|
||||
if isinstance(result, str):
|
||||
try:
|
||||
created_match = re.search(r"(\d+) created", result)
|
||||
updated_match = re.search(r"(\d+) updated", result)
|
||||
if created_match and updated_match:
|
||||
created_count = int(created_match.group(1))
|
||||
updated_count = int(updated_match.group(1))
|
||||
streams_created += created_count
|
||||
streams_updated += updated_count
|
||||
except (AttributeError, ValueError):
|
||||
pass
|
||||
|
||||
# Extract stream counts from result
|
||||
if isinstance(result, str):
|
||||
try:
|
||||
created_match = re.search(r"(\d+) created", result)
|
||||
updated_match = re.search(r"(\d+) updated", result)
|
||||
if created_match and updated_match:
|
||||
created_count = int(created_match.group(1))
|
||||
updated_count = int(updated_match.group(1))
|
||||
streams_created += created_count
|
||||
streams_updated += updated_count
|
||||
except (AttributeError, ValueError):
|
||||
pass
|
||||
# Send progress update
|
||||
progress = int((completed_batches / total_batches) * 100)
|
||||
current_elapsed = time.time() - start_time
|
||||
|
||||
# Send progress update
|
||||
progress = int((completed_batches / total_batches) * 100)
|
||||
current_elapsed = time.time() - start_time
|
||||
if progress > 0:
|
||||
estimated_total = (current_elapsed / progress) * 100
|
||||
time_remaining = max(0, estimated_total - current_elapsed)
|
||||
else:
|
||||
time_remaining = 0
|
||||
|
||||
if progress > 0:
|
||||
estimated_total = (current_elapsed / progress) * 100
|
||||
time_remaining = max(0, estimated_total - current_elapsed)
|
||||
else:
|
||||
time_remaining = 0
|
||||
send_m3u_update(
|
||||
account_id,
|
||||
"parsing",
|
||||
progress,
|
||||
elapsed_time=current_elapsed,
|
||||
time_remaining=time_remaining,
|
||||
streams_processed=streams_created + streams_updated,
|
||||
)
|
||||
|
||||
send_m3u_update(
|
||||
account_id,
|
||||
"parsing",
|
||||
progress,
|
||||
elapsed_time=current_elapsed,
|
||||
time_remaining=time_remaining,
|
||||
streams_processed=streams_created + streams_updated,
|
||||
)
|
||||
logger.debug(f"XC thread batch {completed_batches}/{total_batches} completed")
|
||||
|
||||
logger.debug(f"XC thread batch {completed_batches}/{total_batches} completed")
|
||||
except Exception as e:
|
||||
logger.error(f"Error in XC thread batch {batch_idx}: {str(e)}")
|
||||
completed_batches += 1 # Still count it to avoid hanging
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Error in XC thread batch {batch_idx}: {str(e)}")
|
||||
completed_batches += 1 # Still count it to avoid hanging
|
||||
|
||||
logger.info(f"XC thread-based processing completed for account {account_id}")
|
||||
logger.info(f"XC thread-based processing completed for account {account_id}")
|
||||
|
||||
# Ensure all database transactions are committed before cleanup
|
||||
logger.info(
|
||||
|
|
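A hedged sketch of the batching-plus-thread-pool pattern the rewritten block relies on; `process_batch` stands in for `process_m3u_batch_direct`, and the batch size, thread cap, and progress reporting are simplified assumptions.

```python
# Hedged sketch of batch splitting and threaded processing with progress counting.
from concurrent.futures import ThreadPoolExecutor, as_completed

def run_in_batches(items, process_batch, batch_size=1000, max_threads=4):
    batches = [items[i:i + batch_size] for i in range(0, len(items), batch_size)]
    if not batches:
        return 0
    completed = 0
    with ThreadPoolExecutor(max_workers=min(max_threads, len(batches))) as executor:
        futures = {executor.submit(process_batch, batch): i for i, batch in enumerate(batches)}
        for future in as_completed(futures):
            try:
                future.result()
            except Exception as exc:
                # Count failed batches too so progress reporting never stalls.
                print(f"batch {futures[future]} failed: {exc}")
            completed += 1
            print(f"progress: {int(completed / len(batches) * 100)}%")
    return completed
```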
@@ -2671,7 +2672,16 @@ def refresh_single_m3u_account(account_id):
|
|||
release_task_lock("refresh_single_m3u_account", account_id)
|
||||
|
||||
# Aggressive garbage collection
|
||||
del existing_groups, extinf_data, groups, batches
|
||||
# Only delete variables if they exist
|
||||
if 'existing_groups' in locals():
|
||||
del existing_groups
|
||||
if 'extinf_data' in locals():
|
||||
del extinf_data
|
||||
if 'groups' in locals():
|
||||
del groups
|
||||
if 'batches' in locals():
|
||||
del batches
|
||||
|
||||
from core.utils import cleanup_memory
|
||||
|
||||
cleanup_memory(log_usage=True, force_collection=True)
|
||||
|
|
|
|||
|
|
@@ -45,45 +45,48 @@ def generate_m3u(request, profile_name=None, user=None):
|
|||
The stream URL now points to the new stream_view that uses StreamProfile.
|
||||
Supports both GET and POST methods for compatibility with IPTVSmarters.
|
||||
"""
|
||||
logger.debug("Generating M3U for profile: %s, user: %s", profile_name, user.username if user else "Anonymous")
|
||||
# Check if this is a POST request with data (which we don't want to allow)
|
||||
if request.method == "POST" and request.body:
|
||||
return HttpResponseForbidden("POST requests with content are not allowed")
|
||||
|
||||
if user is not None:
|
||||
if user.user_level == 0:
|
||||
filters = {
|
||||
"channelprofilemembership__enabled": True,
|
||||
"user_level__lte": user.user_level,
|
||||
}
|
||||
user_profile_count = user.channel_profiles.count()
|
||||
|
||||
if user.channel_profiles.count() != 0:
|
||||
channel_profiles = user.channel_profiles.all()
|
||||
filters["channelprofilemembership__channel_profile__in"] = (
|
||||
channel_profiles
|
||||
)
|
||||
|
||||
channels = Channel.objects.filter(**filters).order_by("channel_number")
|
||||
# If user has ALL profiles or NO profiles, give unrestricted access
|
||||
if user_profile_count == 0:
|
||||
# No profile filtering - user sees all channels based on user_level
|
||||
channels = Channel.objects.filter(user_level__lte=user.user_level).order_by("channel_number")
|
||||
else:
|
||||
# User has specific limited profiles assigned
|
||||
filters = {
|
||||
"channelprofilemembership__enabled": True,
|
||||
"user_level__lte": user.user_level,
|
||||
"channelprofilemembership__channel_profile__in": user.channel_profiles.all()
|
||||
}
|
||||
channels = Channel.objects.filter(**filters).distinct().order_by("channel_number")
|
||||
else:
|
||||
channels = Channel.objects.filter(user_level__lte=user.user_level).order_by(
|
||||
"channel_number"
|
||||
)
|
||||
|
||||
|
||||
if profile_name is not None:
|
||||
channel_profile = ChannelProfile.objects.get(name=profile_name)
|
||||
channels = Channel.objects.filter(
|
||||
channelprofilemembership__channel_profile=channel_profile,
|
||||
channelprofilemembership__enabled=True
|
||||
).order_by('channel_number')
|
||||
else:
|
||||
if profile_name is not None:
|
||||
channel_profile = ChannelProfile.objects.get(name=profile_name)
|
||||
channels = Channel.objects.filter(
|
||||
channelprofilemembership__channel_profile=channel_profile,
|
||||
channelprofilemembership__enabled=True,
|
||||
).order_by("channel_number")
|
||||
channelprofilemembership__enabled=True
|
||||
).order_by('channel_number')
|
||||
else:
|
||||
channels = Channel.objects.order_by("channel_number")
|
||||
if profile_name is not None:
|
||||
channel_profile = ChannelProfile.objects.get(name=profile_name)
|
||||
channels = Channel.objects.filter(
|
||||
channelprofilemembership__channel_profile=channel_profile,
|
||||
channelprofilemembership__enabled=True,
|
||||
).order_by("channel_number")
|
||||
else:
|
||||
channels = Channel.objects.order_by("channel_number")
|
||||
|
||||
# Check if the request wants to use direct logo URLs instead of cache
|
||||
use_cached_logos = request.GET.get('cachedlogos', 'true').lower() != 'false'
|
||||
|
|
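The access rule introduced above, condensed into one helper for clarity: a limited user with no assigned channel profiles now sees every channel at or below their user level, while a user with specific profiles only sees channels enabled in one of those profiles. This mirrors the hunk's queries; it is a sketch, not the view's actual code.

```python
# Condensed restatement of the hunk's visibility rule (a sketch, not the view code).
def visible_channels(user):
    if user.channel_profiles.count() == 0:
        # No assigned profiles: unrestricted access at the user's level.
        return (Channel.objects
                .filter(user_level__lte=user.user_level)
                .order_by("channel_number"))
    # Specific profiles assigned: only channels enabled in one of them.
    return (Channel.objects
            .filter(
                user_level__lte=user.user_level,
                channelprofilemembership__enabled=True,
                channelprofilemembership__channel_profile__in=user.channel_profiles.all(),
            )
            .distinct()
            .order_by("channel_number"))
```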
@@ -95,7 +98,22 @@ def generate_m3u(request, profile_name=None, user=None):
|
|||
# Options: 'channel_number' (default), 'tvg_id', 'gracenote'
|
||||
tvg_id_source = request.GET.get('tvg_id_source', 'channel_number').lower()
|
||||
|
||||
m3u_content = "#EXTM3U\n"
|
||||
# Build EPG URL with query parameters if needed
|
||||
epg_base_url = build_absolute_uri_with_port(request, reverse('output:epg_endpoint', args=[profile_name]) if profile_name else reverse('output:epg_endpoint'))
|
||||
|
||||
# Optionally preserve certain query parameters
|
||||
preserved_params = ['tvg_id_source', 'cachedlogos', 'days']
|
||||
query_params = {k: v for k, v in request.GET.items() if k in preserved_params}
|
||||
if query_params:
|
||||
from urllib.parse import urlencode
|
||||
epg_url = f"{epg_base_url}?{urlencode(query_params)}"
|
||||
else:
|
||||
epg_url = epg_base_url
|
||||
|
||||
# Add x-tvg-url and url-tvg attribute for EPG URL
|
||||
m3u_content = f'#EXTM3U x-tvg-url="{epg_url}" url-tvg="{epg_url}"\n'
|
||||
|
||||
# Start building M3U content
|
||||
for channel in channels:
|
||||
group_title = channel.channel_group.name if channel.channel_group else "Default"
|
||||
|
||||
|
|
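A short sketch of the EPG-URL handling added here: only a whitelist of query parameters is carried over to the guide URL, and the playlist header advertises it under both `x-tvg-url` and `url-tvg`, since players differ in which attribute they read. The parameter names come from the hunk; the helper names are assumptions.

```python
# Sketch of forwarding selected query parameters onto the advertised EPG URL.
from urllib.parse import urlencode

def build_epg_url(epg_base_url, request_params,
                  preserved=("tvg_id_source", "cachedlogos", "days")):
    kept = {k: v for k, v in request_params.items() if k in preserved}
    return f"{epg_base_url}?{urlencode(kept)}" if kept else epg_base_url

def m3u_header(epg_url):
    # Both attributes are emitted because players differ in which one they honour.
    return f'#EXTM3U x-tvg-url="{epg_url}" url-tvg="{epg_url}"\n'
```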
@@ -148,7 +166,7 @@ def generate_m3u(request, profile_name=None, user=None):
|
|||
# Determine the stream URL based on the direct parameter
|
||||
if use_direct_urls:
|
||||
# Try to get the first stream's direct URL
|
||||
first_stream = channel.streams.first()
|
||||
first_stream = channel.streams.order_by('channelstream__order').first()
|
||||
if first_stream and first_stream.url:
|
||||
# Use the direct stream URL
|
||||
stream_url = first_stream.url
|
||||
|
|
@@ -300,18 +318,20 @@ def generate_epg(request, profile_name=None, user=None):
|
|||
# Get channels based on user/profile
|
||||
if user is not None:
|
||||
if user.user_level == 0:
|
||||
filters = {
|
||||
"channelprofilemembership__enabled": True,
|
||||
"user_level__lte": user.user_level,
|
||||
}
|
||||
user_profile_count = user.channel_profiles.count()
|
||||
|
||||
if user.channel_profiles.count() != 0:
|
||||
channel_profiles = user.channel_profiles.all()
|
||||
filters["channelprofilemembership__channel_profile__in"] = (
|
||||
channel_profiles
|
||||
)
|
||||
|
||||
channels = Channel.objects.filter(**filters).order_by("channel_number")
|
||||
# If user has ALL profiles or NO profiles, give unrestricted access
|
||||
if user_profile_count == 0:
|
||||
# No profile filtering - user sees all channels based on user_level
|
||||
channels = Channel.objects.filter(user_level__lte=user.user_level).order_by("channel_number")
|
||||
else:
|
||||
# User has specific limited profiles assigned
|
||||
filters = {
|
||||
"channelprofilemembership__enabled": True,
|
||||
"user_level__lte": user.user_level,
|
||||
"channelprofilemembership__channel_profile__in": user.channel_profiles.all()
|
||||
}
|
||||
channels = Channel.objects.filter(**filters).distinct().order_by("channel_number")
|
||||
else:
|
||||
channels = Channel.objects.filter(user_level__lte=user.user_level).order_by(
|
||||
"channel_number"
|
||||
|
|
@@ -848,19 +868,22 @@ def xc_get_live_categories(user):
|
|||
response = []
|
||||
|
||||
if user.user_level == 0:
|
||||
filters = {
|
||||
"channels__channelprofilemembership__enabled": True,
|
||||
"channels__user_level": 0,
|
||||
}
|
||||
user_profile_count = user.channel_profiles.count()
|
||||
|
||||
if user.channel_profiles.count() != 0:
|
||||
# Only get data from active profile
|
||||
channel_profiles = user.channel_profiles.all()
|
||||
filters["channels__channelprofilemembership__channel_profile__in"] = (
|
||||
channel_profiles
|
||||
)
|
||||
|
||||
channel_groups = ChannelGroup.objects.filter(**filters).distinct().order_by(Lower("name"))
|
||||
# If user has ALL profiles or NO profiles, give unrestricted access
|
||||
if user_profile_count == 0:
|
||||
# No profile filtering - user sees all channel groups
|
||||
channel_groups = ChannelGroup.objects.filter(
|
||||
channels__isnull=False, channels__user_level__lte=user.user_level
|
||||
).distinct().order_by(Lower("name"))
|
||||
else:
|
||||
# User has specific limited profiles assigned
|
||||
filters = {
|
||||
"channels__channelprofilemembership__enabled": True,
|
||||
"channels__user_level": 0,
|
||||
"channels__channelprofilemembership__channel_profile__in": user.channel_profiles.all()
|
||||
}
|
||||
channel_groups = ChannelGroup.objects.filter(**filters).distinct().order_by(Lower("name"))
|
||||
else:
|
||||
channel_groups = ChannelGroup.objects.filter(
|
||||
channels__isnull=False, channels__user_level__lte=user.user_level
|
||||
|
|
@@ -882,20 +905,25 @@ def xc_get_live_streams(request, user, category_id=None):
|
|||
streams = []
|
||||
|
||||
if user.user_level == 0:
|
||||
filters = {
|
||||
"channelprofilemembership__enabled": True,
|
||||
"user_level__lte": user.user_level,
|
||||
}
|
||||
user_profile_count = user.channel_profiles.count()
|
||||
|
||||
if user.channel_profiles.count() > 0:
|
||||
# Only get data from active profile
|
||||
channel_profiles = user.channel_profiles.all()
|
||||
filters["channelprofilemembership__channel_profile__in"] = channel_profiles
|
||||
|
||||
if category_id is not None:
|
||||
filters["channel_group__id"] = category_id
|
||||
|
||||
channels = Channel.objects.filter(**filters).order_by("channel_number")
|
||||
# If user has ALL profiles or NO profiles, give unrestricted access
|
||||
if user_profile_count == 0:
|
||||
# No profile filtering - user sees all channels based on user_level
|
||||
filters = {"user_level__lte": user.user_level}
|
||||
if category_id is not None:
|
||||
filters["channel_group__id"] = category_id
|
||||
channels = Channel.objects.filter(**filters).order_by("channel_number")
|
||||
else:
|
||||
# User has specific limited profiles assigned
|
||||
filters = {
|
||||
"channelprofilemembership__enabled": True,
|
||||
"user_level__lte": user.user_level,
|
||||
"channelprofilemembership__channel_profile__in": user.channel_profiles.all()
|
||||
}
|
||||
if category_id is not None:
|
||||
filters["channel_group__id"] = category_id
|
||||
channels = Channel.objects.filter(**filters).distinct().order_by("channel_number")
|
||||
else:
|
||||
if not category_id:
|
||||
channels = Channel.objects.filter(user_level__lte=user.user_level).order_by("channel_number")
|
||||
|
|
@@ -920,7 +948,7 @@ def xc_get_live_streams(request, user, category_id=None):
|
|||
)
|
||||
),
|
||||
"epg_channel_id": str(int(channel.channel_number)) if channel.channel_number.is_integer() else str(channel.channel_number),
|
||||
"added": int(time.time()), # @TODO: make this the actual created date
|
||||
"added": int(channel.created_at.timestamp()),
|
||||
"is_adult": 0,
|
||||
"category_id": str(channel.channel_group.id),
|
||||
"category_ids": [channel.channel_group.id],
|
||||
|
|
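The `epg_channel_id` formatting rule above in isolation, assuming `channel_number` is a float as the `.is_integer()` call implies:

```python
# Whole-numbered channels drop the trailing ".0"; fractional ones keep it.
def epg_channel_id(channel_number):
    return str(int(channel_number)) if channel_number.is_integer() else str(channel_number)

# epg_channel_id(5.0) -> "5", epg_channel_id(5.5) -> "5.5"
```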
@@ -941,17 +969,27 @@ def xc_get_epg(request, user, short=False):
|
|||
|
||||
channel = None
|
||||
if user.user_level < 10:
|
||||
filters = {
|
||||
"id": channel_id,
|
||||
"channelprofilemembership__enabled": True,
|
||||
"user_level__lte": user.user_level,
|
||||
}
|
||||
user_profile_count = user.channel_profiles.count()
|
||||
|
||||
if user.channel_profiles.count() > 0:
|
||||
channel_profiles = user.channel_profiles.all()
|
||||
filters["channelprofilemembership__channel_profile__in"] = channel_profiles
|
||||
# If user has ALL profiles or NO profiles, give unrestricted access
|
||||
if user_profile_count == 0:
|
||||
# No profile filtering - user sees all channels based on user_level
|
||||
channel = Channel.objects.filter(
|
||||
id=channel_id,
|
||||
user_level__lte=user.user_level
|
||||
).first()
|
||||
else:
|
||||
# User has specific limited profiles assigned
|
||||
filters = {
|
||||
"id": channel_id,
|
||||
"channelprofilemembership__enabled": True,
|
||||
"user_level__lte": user.user_level,
|
||||
"channelprofilemembership__channel_profile__in": user.channel_profiles.all()
|
||||
}
|
||||
channel = Channel.objects.filter(**filters).distinct().first()
|
||||
|
||||
channel = get_object_or_404(Channel, **filters)
|
||||
if not channel:
|
||||
raise Http404()
|
||||
else:
|
||||
channel = get_object_or_404(Channel, id=channel_id)
|
||||
|
||||
|
|
@@ -1008,31 +1046,11 @@ def xc_get_vod_categories(user):
|
|||
|
||||
response = []
|
||||
|
||||
# Filter categories based on user's M3U accounts
|
||||
if user.user_level == 0:
|
||||
# For regular users, get categories from their accessible M3U accounts
|
||||
if user.channel_profiles.count() > 0:
|
||||
channel_profiles = user.channel_profiles.all()
|
||||
# Get M3U accounts accessible through user's profiles
|
||||
from apps.m3u.models import M3UAccount
|
||||
m3u_accounts = M3UAccount.objects.filter(
|
||||
is_active=True,
|
||||
profiles__in=channel_profiles
|
||||
).distinct()
|
||||
else:
|
||||
m3u_accounts = []
|
||||
|
||||
# Get categories that have movie relations with these accounts
|
||||
categories = VODCategory.objects.filter(
|
||||
category_type='movie',
|
||||
m3umovierelation__m3u_account__in=m3u_accounts
|
||||
).distinct().order_by(Lower("name"))
|
||||
else:
|
||||
# Admins can see all categories that have active movie relations
|
||||
categories = VODCategory.objects.filter(
|
||||
category_type='movie',
|
||||
m3umovierelation__m3u_account__is_active=True
|
||||
).distinct().order_by(Lower("name"))
|
||||
# All authenticated users get access to VOD from all active M3U accounts
|
||||
categories = VODCategory.objects.filter(
|
||||
category_type='movie',
|
||||
m3umovierelation__m3u_account__is_active=True
|
||||
).distinct().order_by(Lower("name"))
|
||||
|
||||
for category in categories:
|
||||
response.append({
|
||||
|
|
@@ -1051,22 +1069,9 @@ def xc_get_vod_streams(request, user, category_id=None):
|
|||
|
||||
streams = []
|
||||
|
||||
# Build filters for movies based on user access
|
||||
# All authenticated users get access to VOD from all active M3U accounts
|
||||
filters = {"m3u_relations__m3u_account__is_active": True}
|
||||
|
||||
if user.user_level == 0:
|
||||
# For regular users, filter by accessible M3U accounts
|
||||
if user.channel_profiles.count() > 0:
|
||||
channel_profiles = user.channel_profiles.all()
|
||||
from apps.m3u.models import M3UAccount
|
||||
m3u_accounts = M3UAccount.objects.filter(
|
||||
is_active=True,
|
||||
profiles__in=channel_profiles
|
||||
).distinct()
|
||||
filters["m3u_relations__m3u_account__in"] = m3u_accounts
|
||||
else:
|
||||
return [] # No accessible accounts
|
||||
|
||||
if category_id:
|
||||
filters["m3u_relations__category_id"] = category_id
|
||||
|
||||
|
|
@@ -1127,28 +1132,11 @@ def xc_get_series_categories(user):
|
|||
|
||||
response = []
|
||||
|
||||
# Similar filtering as VOD categories but for series
|
||||
if user.user_level == 0:
|
||||
if user.channel_profiles.count() > 0:
|
||||
channel_profiles = user.channel_profiles.all()
|
||||
from apps.m3u.models import M3UAccount
|
||||
m3u_accounts = M3UAccount.objects.filter(
|
||||
is_active=True,
|
||||
profiles__in=channel_profiles
|
||||
).distinct()
|
||||
else:
|
||||
m3u_accounts = []
|
||||
|
||||
# Get categories that have series relations with these accounts
|
||||
categories = VODCategory.objects.filter(
|
||||
category_type='series',
|
||||
m3useriesrelation__m3u_account__in=m3u_accounts
|
||||
).distinct().order_by(Lower("name"))
|
||||
else:
|
||||
categories = VODCategory.objects.filter(
|
||||
category_type='series',
|
||||
m3useriesrelation__m3u_account__is_active=True
|
||||
).distinct().order_by(Lower("name"))
|
||||
# All authenticated users get access to series from all active M3U accounts
|
||||
categories = VODCategory.objects.filter(
|
||||
category_type='series',
|
||||
m3useriesrelation__m3u_account__is_active=True
|
||||
).distinct().order_by(Lower("name"))
|
||||
|
||||
for category in categories:
|
||||
response.append({
|
||||
|
|
@@ -1166,21 +1154,9 @@ def xc_get_series(request, user, category_id=None):
|
|||
|
||||
series_list = []
|
||||
|
||||
# Build filters based on user access
|
||||
# All authenticated users get access to series from all active M3U accounts
|
||||
filters = {"m3u_account__is_active": True}
|
||||
|
||||
if user.user_level == 0:
|
||||
if user.channel_profiles.count() > 0:
|
||||
channel_profiles = user.channel_profiles.all()
|
||||
from apps.m3u.models import M3UAccount
|
||||
m3u_accounts = M3UAccount.objects.filter(
|
||||
is_active=True,
|
||||
profiles__in=channel_profiles
|
||||
).distinct()
|
||||
filters["m3u_account__in"] = m3u_accounts
|
||||
else:
|
||||
return []
|
||||
|
||||
if category_id:
|
||||
filters["category_id"] = category_id
|
||||
|
||||
|
|
@@ -1228,21 +1204,9 @@ def xc_get_series_info(request, user, series_id):
|
|||
if not series_id:
|
||||
raise Http404()
|
||||
|
||||
# Get series relation with user access filtering
|
||||
# All authenticated users get access to series from all active M3U accounts
|
||||
filters = {"id": series_id, "m3u_account__is_active": True}
|
||||
|
||||
if user.user_level == 0:
|
||||
if user.channel_profiles.count() > 0:
|
||||
channel_profiles = user.channel_profiles.all()
|
||||
from apps.m3u.models import M3UAccount
|
||||
m3u_accounts = M3UAccount.objects.filter(
|
||||
is_active=True,
|
||||
profiles__in=channel_profiles
|
||||
).distinct()
|
||||
filters["m3u_account__in"] = m3u_accounts
|
||||
else:
|
||||
raise Http404()
|
||||
|
||||
try:
|
||||
series_relation = M3USeriesRelation.objects.select_related('series', 'series__logo').get(**filters)
|
||||
series = series_relation.series
|
||||
|
|
@@ -1439,21 +1403,9 @@ def xc_get_vod_info(request, user, vod_id):
|
|||
if not vod_id:
|
||||
raise Http404()
|
||||
|
||||
# Get movie relation with user access filtering - use movie ID instead of relation ID
|
||||
# All authenticated users get access to VOD from all active M3U accounts
|
||||
filters = {"movie_id": vod_id, "m3u_account__is_active": True}
|
||||
|
||||
if user.user_level == 0:
|
||||
if user.channel_profiles.count() > 0:
|
||||
channel_profiles = user.channel_profiles.all()
|
||||
from apps.m3u.models import M3UAccount
|
||||
m3u_accounts = M3UAccount.objects.filter(
|
||||
is_active=True,
|
||||
profiles__in=channel_profiles
|
||||
).distinct()
|
||||
filters["m3u_account__in"] = m3u_accounts
|
||||
else:
|
||||
raise Http404()
|
||||
|
||||
try:
|
||||
# Order by account priority to get the best relation when multiple exist
|
||||
movie_relation = M3UMovieRelation.objects.select_related('movie', 'movie__logo').filter(**filters).order_by('-m3u_account__priority', 'id').first()
|
||||
|
|
@@ -1602,22 +1554,9 @@ def xc_movie_stream(request, username, password, stream_id, extension):
|
|||
if custom_properties["xc_password"] != password:
|
||||
return JsonResponse({"error": "Invalid credentials"}, status=401)
|
||||
|
||||
# Get movie relation based on user access level - use movie ID instead of relation ID
|
||||
# All authenticated users get access to VOD from all active M3U accounts
|
||||
filters = {"movie_id": stream_id, "m3u_account__is_active": True}
|
||||
|
||||
if user.user_level < 10:
|
||||
# For regular users, filter by accessible M3U accounts
|
||||
if user.channel_profiles.count() > 0:
|
||||
channel_profiles = user.channel_profiles.all()
|
||||
from apps.m3u.models import M3UAccount
|
||||
m3u_accounts = M3UAccount.objects.filter(
|
||||
is_active=True,
|
||||
profiles__in=channel_profiles
|
||||
).distinct()
|
||||
filters["m3u_account__in"] = m3u_accounts
|
||||
else:
|
||||
return JsonResponse({"error": "No accessible content"}, status=403)
|
||||
|
||||
try:
|
||||
# Order by account priority to get the best relation when multiple exist
|
||||
movie_relation = M3UMovieRelation.objects.select_related('movie').filter(**filters).order_by('-m3u_account__priority', 'id').first()
|
||||
|
|
@@ -1652,22 +1591,9 @@ def xc_series_stream(request, username, password, stream_id, extension):
|
|||
if custom_properties["xc_password"] != password:
|
||||
return JsonResponse({"error": "Invalid credentials"}, status=401)
|
||||
|
||||
# Get episode relation based on user access level - use episode ID instead of stream_id
|
||||
# All authenticated users get access to series/episodes from all active M3U accounts
|
||||
filters = {"episode_id": stream_id, "m3u_account__is_active": True}
|
||||
|
||||
if user.user_level < 10:
|
||||
# For regular users, filter by accessible M3U accounts
|
||||
if user.channel_profiles.count() > 0:
|
||||
channel_profiles = user.channel_profiles.all()
|
||||
from apps.m3u.models import M3UAccount
|
||||
m3u_accounts = M3UAccount.objects.filter(
|
||||
is_active=True,
|
||||
profiles__in=channel_profiles
|
||||
).distinct()
|
||||
filters["m3u_account__in"] = m3u_accounts
|
||||
else:
|
||||
return JsonResponse({"error": "No accessible content"}, status=403)
|
||||
|
||||
try:
|
||||
episode_relation = M3UEpisodeRelation.objects.select_related('episode').get(**filters)
|
||||
except M3UEpisodeRelation.DoesNotExist:
|
||||
|
|
|
|||
|
|
@@ -127,9 +127,9 @@ def stream_ts(request, channel_id):
|
|||
)
|
||||
ChannelService.stop_channel(channel_id)
|
||||
|
||||
# Use max retry attempts and connection timeout from config
|
||||
max_retries = ConfigHelper.max_retries()
|
||||
retry_timeout = ConfigHelper.connection_timeout()
|
||||
# Use fixed retry interval and timeout
|
||||
retry_timeout = 1.5 # 1.5 seconds total timeout
|
||||
retry_interval = 0.1 # 100ms between attempts
|
||||
wait_start_time = time.time()
|
||||
|
||||
stream_url = None
|
||||
|
|
@@ -137,16 +137,18 @@ def stream_ts(request, channel_id):
|
|||
transcode = False
|
||||
profile_value = None
|
||||
error_reason = None
|
||||
attempt = 0
|
||||
|
||||
# Try to get a stream with configured retries
|
||||
for attempt in range(max_retries):
|
||||
# Try to get a stream with fixed interval retries
|
||||
while time.time() - wait_start_time < retry_timeout:
|
||||
attempt += 1
|
||||
stream_url, stream_user_agent, transcode, profile_value = (
|
||||
generate_stream_url(channel_id)
|
||||
)
|
||||
|
||||
if stream_url is not None:
|
||||
logger.info(
|
||||
f"[{client_id}] Successfully obtained stream for channel {channel_id}"
|
||||
f"[{client_id}] Successfully obtained stream for channel {channel_id} after {attempt} attempts"
|
||||
)
|
||||
break
|
||||
|
||||
|
|
@@ -158,21 +160,15 @@ def stream_ts(request, channel_id):
|
|||
)
|
||||
break
|
||||
|
||||
# Don't exceed the overall connection timeout
|
||||
if time.time() - wait_start_time > retry_timeout:
|
||||
logger.warning(
|
||||
f"[{client_id}] Connection wait timeout exceeded ({retry_timeout}s)"
|
||||
# Wait 100ms before retrying
|
||||
elapsed_time = time.time() - wait_start_time
|
||||
remaining_time = retry_timeout - elapsed_time
|
||||
if remaining_time > retry_interval:
|
||||
logger.info(
|
||||
f"[{client_id}] Waiting {retry_interval*1000:.0f}ms for a connection to become available (attempt {attempt}, {remaining_time:.1f}s remaining)"
|
||||
)
|
||||
break
|
||||
|
||||
# Wait before retrying (using exponential backoff with a cap)
|
||||
wait_time = min(0.5 * (2**attempt), 2.0) # Caps at 2 seconds
|
||||
logger.info(
|
||||
f"[{client_id}] Waiting {wait_time:.1f}s for a connection to become available (attempt {attempt+1}/{max_retries})"
|
||||
)
|
||||
gevent.sleep(
|
||||
wait_time
|
||||
) # FIXED: Using gevent.sleep instead of time.sleep
|
||||
gevent.sleep(retry_interval)
|
||||
retry_interval += 0.025 # Increase wait time by 25ms for next attempt
|
||||
|
||||
if stream_url is None:
|
||||
# Make sure to release any stream locks that might have been acquired
|
||||
|
|
|
|||
|
|
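A minimal sketch of the new retry policy, using the same constants as the hunk: poll for a stream URL for at most roughly 1.5 s total, starting at 100 ms between attempts and stretching the interval by 25 ms each round, with `gevent.sleep` keeping the worker cooperative. The helper name is an assumption.

```python
# Minimal sketch of the fixed-interval retry loop with a hard overall timeout.
import time
import gevent

def wait_for_stream(get_url, timeout=1.5, interval=0.1, step=0.025):
    start = time.time()
    attempt = 0
    while time.time() - start < timeout:
        attempt += 1
        url = get_url()
        if url is not None:
            return url, attempt
        remaining = timeout - (time.time() - start)
        if remaining <= interval:
            break  # not enough time left for another full wait
        gevent.sleep(interval)
        interval += step  # back off slightly on each retry
    return None, attempt
```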
@@ -540,11 +540,9 @@ class RedisBackedVODConnection:
|
|||
}
|
||||
return {}
|
||||
|
||||
def cleanup(self, connection_manager=None):
|
||||
"""Clean up local resources and Redis state"""
|
||||
# Get connection state before cleanup to handle profile decrementing
|
||||
state = self._get_connection_state()
|
||||
|
||||
def cleanup(self, connection_manager=None, current_worker_id=None):
|
||||
"""Smart cleanup based on worker ownership and active streams"""
|
||||
# Always clean up local resources first
|
||||
if self.local_response:
|
||||
self.local_response.close()
|
||||
self.local_response = None
|
||||
|
|
@@ -552,38 +550,72 @@ class RedisBackedVODConnection:
|
|||
self.local_session.close()
|
||||
self.local_session = None
|
||||
|
||||
# Remove from Redis
|
||||
if self.redis_client:
|
||||
try:
|
||||
# Use pipeline for atomic cleanup operations
|
||||
pipe = self.redis_client.pipeline()
|
||||
# Get current connection state to check ownership and active streams
|
||||
state = self._get_connection_state()
|
||||
|
||||
# 1. Remove main connection state (now contains consolidated data)
|
||||
pipe.delete(self.connection_key)
|
||||
if not state:
|
||||
logger.info(f"[{self.session_id}] No connection state found - local cleanup only")
|
||||
return
|
||||
|
||||
# 2. Remove distributed lock
|
||||
pipe.delete(self.lock_key)
|
||||
# Check if there are active streams
|
||||
if state.active_streams > 0:
|
||||
# There are active streams - check ownership
|
||||
if current_worker_id and state.worker_id == current_worker_id:
|
||||
logger.info(f"[{self.session_id}] Active streams present ({state.active_streams}) and we own them - local cleanup only")
|
||||
else:
|
||||
logger.info(f"[{self.session_id}] Active streams present ({state.active_streams}) but owned by worker {state.worker_id} - local cleanup only")
|
||||
return
|
||||
|
||||
# Execute all cleanup operations
|
||||
pipe.execute()
|
||||
# No active streams - we can clean up Redis state
|
||||
if not self.redis_client:
|
||||
logger.info(f"[{self.session_id}] No Redis client - local cleanup only")
|
||||
return
|
||||
|
||||
logger.info(f"[{self.session_id}] Cleaned up all Redis keys (consolidated connection state, locks)")
|
||||
# Acquire lock and do final check before cleanup to prevent race conditions
|
||||
if not self._acquire_lock():
|
||||
logger.warning(f"[{self.session_id}] Could not acquire lock for cleanup - skipping")
|
||||
return
|
||||
|
||||
# Decrement profile connections if we have the state and connection manager
|
||||
if state and state.m3u_profile_id and connection_manager:
|
||||
logger.info(f"[{self.session_id}] Decrementing profile connection count for profile {state.m3u_profile_id}")
|
||||
connection_manager._decrement_profile_connections(state.m3u_profile_id)
|
||||
logger.info(f"[{self.session_id}] Profile connection count decremented for profile {state.m3u_profile_id}")
|
||||
else:
|
||||
if not state:
|
||||
logger.warning(f"[{self.session_id}] No connection state found during cleanup - cannot decrement profile connections")
|
||||
elif not state.m3u_profile_id:
|
||||
logger.warning(f"[{self.session_id}] No profile ID in connection state - cannot decrement profile connections")
|
||||
elif not connection_manager:
|
||||
logger.warning(f"[{self.session_id}] No connection manager provided - cannot decrement profile connections")
|
||||
try:
|
||||
# Re-check active streams with lock held to prevent race conditions
|
||||
current_state = self._get_connection_state()
|
||||
if not current_state:
|
||||
logger.info(f"[{self.session_id}] Connection state no longer exists - cleanup already done")
|
||||
return
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"[{self.session_id}] Error cleaning up Redis state: {e}")
|
||||
if current_state.active_streams > 0:
|
||||
logger.info(f"[{self.session_id}] Active streams now present ({current_state.active_streams}) - skipping cleanup")
|
||||
return
|
||||
|
||||
# Use pipeline for atomic cleanup operations
|
||||
pipe = self.redis_client.pipeline()
|
||||
|
||||
# 1. Remove main connection state (contains consolidated data)
|
||||
pipe.delete(self.connection_key)
|
||||
|
||||
# 2. Remove distributed lock (will be released below anyway)
|
||||
pipe.delete(self.lock_key)
|
||||
|
||||
# Execute all cleanup operations
|
||||
pipe.execute()
|
||||
|
||||
logger.info(f"[{self.session_id}] Cleaned up Redis keys (verified no active streams)")
|
||||
|
||||
# Decrement profile connections if we have the state and connection manager
|
||||
if state.m3u_profile_id and connection_manager:
|
||||
connection_manager._decrement_profile_connections(state.m3u_profile_id)
|
||||
logger.info(f"[{self.session_id}] Profile connection count decremented for profile {state.m3u_profile_id}")
|
||||
else:
|
||||
if not state.m3u_profile_id:
|
||||
logger.warning(f"[{self.session_id}] No profile ID in connection state - cannot decrement profile connections")
|
||||
elif not connection_manager:
|
||||
logger.warning(f"[{self.session_id}] No connection manager provided - cannot decrement profile connections")
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"[{self.session_id}] Error cleaning up Redis state: {e}")
|
||||
finally:
|
||||
# Always release the lock
|
||||
self._release_lock()
|
||||
|
||||
|
||||
# Modify the VODConnectionManager to use Redis-backed connections
|
||||
|
|
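An outline, with stand-in method names, of the cleanup rule the rewritten method enforces: local resources are always released, but shared Redis state is only torn down when no streams are active, and only after re-checking under the distributed lock so a stream that was just reserved is not destroyed. In the real code the worker-ownership check only changes the log message while streams are still active.

```python
# Outline of the smart-cleanup decision flow (method names are stand-ins).
def smart_cleanup(conn, manager):
    state = conn.get_state()
    if state is None:
        return  # nothing shared left to clean up
    if state.active_streams > 0:
        return  # someone is still streaming; local cleanup only
    if not conn.acquire_lock():
        return  # another worker is handling this connection
    try:
        state = conn.get_state()  # re-check after winning the lock
        if state is None or state.active_streams > 0:
            return  # a stream appeared in the meantime; leave shared state alone
        conn.delete_redis_keys()
        if state.m3u_profile_id and manager is not None:
            manager.decrement_profile_connections(state.m3u_profile_id)
    finally:
        conn.release_lock()
```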
@@ -694,6 +726,15 @@ class MultiWorkerVODConnectionManager:
|
|||
logger.info(f"[{client_id}] Worker {self.worker_id} - Found matching idle session: {matching_session_id}")
|
||||
effective_session_id = matching_session_id
|
||||
client_id = matching_session_id # Update client_id for logging consistency
|
||||
|
||||
# IMMEDIATELY reserve this session by incrementing active streams to prevent cleanup
|
||||
temp_connection = RedisBackedVODConnection(effective_session_id, self.redis_client)
|
||||
if temp_connection.increment_active_streams():
|
||||
logger.info(f"[{client_id}] Reserved idle session - incremented active streams")
|
||||
else:
|
||||
logger.warning(f"[{client_id}] Failed to reserve idle session - falling back to new session")
|
||||
effective_session_id = session_id
|
||||
matching_session_id = None # Clear the match so we create a new connection
|
||||
else:
|
||||
logger.info(f"[{client_id}] Worker {self.worker_id} - No matching idle session found, using new session")
|
||||
effective_session_id = session_id
|
||||
|
|
@@ -761,14 +802,20 @@ class MultiWorkerVODConnectionManager:
|
|||
else:
|
||||
logger.info(f"[{client_id}] Worker {self.worker_id} - Using existing Redis-backed connection")
|
||||
|
||||
# Update session activity in consolidated connection state
|
||||
# Transfer ownership to current worker and update session activity
|
||||
if redis_connection._acquire_lock():
|
||||
try:
|
||||
state = redis_connection._get_connection_state()
|
||||
if state:
|
||||
old_worker = state.worker_id
|
||||
state.last_activity = time.time()
|
||||
state.worker_id = self.worker_id # Track which worker last accessed this
|
||||
state.worker_id = self.worker_id # Transfer ownership to current worker
|
||||
redis_connection._save_connection_state(state)
|
||||
|
||||
if old_worker != self.worker_id:
|
||||
logger.info(f"[{client_id}] Ownership transferred from worker {old_worker} to {self.worker_id}")
|
||||
else:
|
||||
logger.debug(f"[{client_id}] Worker {self.worker_id} retaining ownership")
|
||||
finally:
|
||||
redis_connection._release_lock()
|
||||
|
||||
|
|
@@ -788,8 +835,13 @@ class MultiWorkerVODConnectionManager:
|
|||
try:
|
||||
logger.info(f"[{client_id}] Worker {self.worker_id} - Starting Redis-backed stream")
|
||||
|
||||
# Increment active streams
|
||||
redis_connection.increment_active_streams()
|
||||
# Increment active streams (unless we already did it for session reuse)
|
||||
if not matching_session_id:
|
||||
# New session - increment active streams
|
||||
redis_connection.increment_active_streams()
|
||||
else:
|
||||
# Reused session - we already incremented when reserving the session
|
||||
logger.debug(f"[{client_id}] Using pre-reserved session - active streams already incremented")
|
||||
|
||||
bytes_sent = 0
|
||||
chunk_count = 0
|
||||
|
|
@@ -819,13 +871,13 @@ class MultiWorkerVODConnectionManager:
|
|||
redis_connection.decrement_active_streams()
|
||||
decremented = True
|
||||
|
||||
# Schedule cleanup if no active streams after normal completion
|
||||
# Schedule smart cleanup if no active streams after normal completion
|
||||
if not redis_connection.has_active_streams():
|
||||
def delayed_cleanup():
|
||||
time.sleep(1) # Wait 1 second
|
||||
if not redis_connection.has_active_streams():
|
||||
logger.info(f"[{client_id}] Worker {self.worker_id} - Cleaning up idle Redis connection after normal completion")
|
||||
redis_connection.cleanup(connection_manager=self)
|
||||
# Smart cleanup: check active streams and ownership
|
||||
logger.info(f"[{client_id}] Worker {self.worker_id} - Checking for smart cleanup after normal completion")
|
||||
redis_connection.cleanup(connection_manager=self, current_worker_id=self.worker_id)
|
||||
|
||||
import threading
|
||||
cleanup_thread = threading.Thread(target=delayed_cleanup)
|
||||
|
|
@@ -838,13 +890,13 @@ class MultiWorkerVODConnectionManager:
|
|||
redis_connection.decrement_active_streams()
|
||||
decremented = True
|
||||
|
||||
# Schedule cleanup if no active streams
|
||||
# Schedule smart cleanup if no active streams
|
||||
if not redis_connection.has_active_streams():
|
||||
def delayed_cleanup():
|
||||
time.sleep(1) # Wait 1 second
|
||||
if not redis_connection.has_active_streams():
|
||||
logger.info(f"[{client_id}] Worker {self.worker_id} - Cleaning up idle Redis connection")
|
||||
redis_connection.cleanup(connection_manager=self)
|
||||
# Smart cleanup: check active streams and ownership
|
||||
logger.info(f"[{client_id}] Worker {self.worker_id} - Checking for smart cleanup after client disconnect")
|
||||
redis_connection.cleanup(connection_manager=self, current_worker_id=self.worker_id)
|
||||
|
||||
import threading
|
||||
cleanup_thread = threading.Thread(target=delayed_cleanup)
|
||||
|
|
@@ -856,7 +908,8 @@ class MultiWorkerVODConnectionManager:
|
|||
if not decremented:
|
||||
redis_connection.decrement_active_streams()
|
||||
decremented = True
|
||||
redis_connection.cleanup(connection_manager=self)
|
||||
# Smart cleanup on error - immediate cleanup since we're in error state
|
||||
redis_connection.cleanup(connection_manager=self, current_worker_id=self.worker_id)
|
||||
yield b"Error: Stream interrupted"
|
||||
|
||||
finally:
|
||||
|
|
|
|||
|
|
@@ -176,14 +176,15 @@ class VODStreamView(View):
|
|||
logger.error(f"[VOD-ERROR] No stream URL available for {content_type} {content_id}")
|
||||
return HttpResponse("No stream URL available", status=503)
|
||||
|
||||
# Get M3U profile
|
||||
m3u_profile = self._get_m3u_profile(m3u_account, profile_id)
|
||||
# Get M3U profile (returns profile and current connection count)
|
||||
profile_result = self._get_m3u_profile(m3u_account, profile_id, session_id)
|
||||
|
||||
if not m3u_profile:
|
||||
if not profile_result or not profile_result[0]:
|
||||
logger.error(f"[VOD-ERROR] No suitable M3U profile found for {content_type} {content_id}")
|
||||
return HttpResponse("No available stream", status=503)
|
||||
|
||||
logger.info(f"[VOD-PROFILE] Using M3U profile: {m3u_profile.id} (max_streams: {m3u_profile.max_streams}, current: {m3u_profile.current_viewers})")
|
||||
m3u_profile, current_connections = profile_result
|
||||
logger.info(f"[VOD-PROFILE] Using M3U profile: {m3u_profile.id} (max_streams: {m3u_profile.max_streams}, current: {current_connections})")
|
||||
|
||||
# Connection tracking is handled by the connection manager
|
||||
# Transform URL based on profile
|
||||
|
|
@@ -279,11 +280,13 @@ class VODStreamView(View):
|
|||
logger.error(f"[VOD-HEAD] No stream URL available for {content_type} {content_id}")
|
||||
return HttpResponse("No stream URL available", status=503)
|
||||
|
||||
# Get M3U profile
|
||||
m3u_profile = self._get_m3u_profile(m3u_account, profile_id)
|
||||
if not m3u_profile:
|
||||
logger.error(f"[VOD-HEAD] No M3U profile found")
|
||||
return HttpResponse("Profile not found", status=404)
|
||||
# Get M3U profile (returns profile and current connection count)
|
||||
profile_result = self._get_m3u_profile(m3u_account, profile_id, session_id)
|
||||
if not profile_result or not profile_result[0]:
|
||||
logger.error(f"[VOD-HEAD] No M3U profile found or all profiles at capacity")
|
||||
return HttpResponse("No available stream", status=503)
|
||||
|
||||
m3u_profile, current_connections = profile_result
|
||||
|
||||
# Transform URL if needed
|
||||
final_stream_url = self._transform_url(stream_url, m3u_profile)
|
||||
|
|
@@ -517,10 +520,63 @@ class VODStreamView(View):
|
|||
logger.error(f"[VOD-URL] Error getting stream URL from relation: {e}", exc_info=True)
|
||||
return None
|
||||
|
||||
def _get_m3u_profile(self, m3u_account, profile_id):
|
||||
"""Get appropriate M3U profile for streaming"""
|
||||
def _get_m3u_profile(self, m3u_account, profile_id, session_id=None):
|
||||
"""Get appropriate M3U profile for streaming using Redis-based viewer counts
|
||||
|
||||
Args:
|
||||
m3u_account: M3UAccount instance
|
||||
profile_id: Optional specific profile ID requested
|
||||
session_id: Optional session ID to check for existing connections
|
||||
|
||||
Returns:
|
||||
tuple: (M3UAccountProfile, current_connections) or None if no profile found
|
||||
"""
|
||||
try:
|
||||
# If specific profile requested, try to use it
|
||||
from core.utils import RedisClient
|
||||
redis_client = RedisClient.get_client()
|
||||
|
||||
if not redis_client:
|
||||
logger.warning("Redis not available, falling back to default profile")
|
||||
default_profile = M3UAccountProfile.objects.filter(
|
||||
m3u_account=m3u_account,
|
||||
is_active=True,
|
||||
is_default=True
|
||||
).first()
|
||||
return (default_profile, 0) if default_profile else None
|
||||
|
||||
# Check if this session already has an active connection
|
||||
if session_id:
|
||||
persistent_connection_key = f"vod_persistent_connection:{session_id}"
|
||||
connection_data = redis_client.hgetall(persistent_connection_key)
|
||||
|
||||
if connection_data:
|
||||
# Decode Redis hash data
|
||||
decoded_data = {}
|
||||
for k, v in connection_data.items():
|
||||
k_str = k.decode('utf-8') if isinstance(k, bytes) else k
|
||||
v_str = v.decode('utf-8') if isinstance(v, bytes) else v
|
||||
decoded_data[k_str] = v_str
|
||||
|
||||
existing_profile_id = decoded_data.get('m3u_profile_id')
|
||||
if existing_profile_id:
|
||||
try:
|
||||
existing_profile = M3UAccountProfile.objects.get(
|
||||
id=int(existing_profile_id),
|
||||
m3u_account=m3u_account,
|
||||
is_active=True
|
||||
)
|
||||
# Get current connections for logging
|
||||
profile_connections_key = f"profile_connections:{existing_profile.id}"
|
||||
current_connections = int(redis_client.get(profile_connections_key) or 0)
|
||||
|
||||
logger.info(f"[PROFILE-SELECTION] Session {session_id} reusing existing profile {existing_profile.id}: {current_connections}/{existing_profile.max_streams} connections")
|
||||
return (existing_profile, current_connections)
|
||||
except (M3UAccountProfile.DoesNotExist, ValueError):
|
||||
logger.warning(f"[PROFILE-SELECTION] Session {session_id} has invalid profile ID {existing_profile_id}, selecting new profile")
|
||||
except Exception as e:
|
||||
logger.warning(f"[PROFILE-SELECTION] Error checking existing profile for session {session_id}: {e}")
|
||||
else:
|
||||
logger.debug(f"[PROFILE-SELECTION] Session {session_id} exists but has no profile ID stored") # If specific profile requested, try to use it
|
||||
if profile_id:
|
||||
try:
|
||||
profile = M3UAccountProfile.objects.get(
|
||||
|
|
@@ -528,24 +584,46 @@ class VODStreamView(View):
|
|||
m3u_account=m3u_account,
|
||||
is_active=True
|
||||
)
|
||||
if profile.current_viewers < profile.max_streams or profile.max_streams == 0:
|
||||
return profile
|
||||
except M3UAccountProfile.DoesNotExist:
|
||||
pass
|
||||
# Check Redis-based current connections
|
||||
profile_connections_key = f"profile_connections:{profile.id}"
|
||||
current_connections = int(redis_client.get(profile_connections_key) or 0)
|
||||
|
||||
# Find available profile ordered by current usage (least loaded first)
|
||||
profiles = M3UAccountProfile.objects.filter(
|
||||
if profile.max_streams == 0 or current_connections < profile.max_streams:
|
||||
logger.info(f"[PROFILE-SELECTION] Using requested profile {profile.id}: {current_connections}/{profile.max_streams} connections")
|
||||
return (profile, current_connections)
|
||||
else:
|
||||
logger.warning(f"[PROFILE-SELECTION] Requested profile {profile.id} is at capacity: {current_connections}/{profile.max_streams}")
|
||||
except M3UAccountProfile.DoesNotExist:
|
||||
logger.warning(f"[PROFILE-SELECTION] Requested profile {profile_id} not found")
|
||||
|
||||
# Get active profiles ordered by priority (default first)
|
||||
m3u_profiles = M3UAccountProfile.objects.filter(
|
||||
m3u_account=m3u_account,
|
||||
is_active=True
|
||||
).order_by('current_viewers')
|
||||
)
|
||||
|
||||
default_profile = m3u_profiles.filter(is_default=True).first()
|
||||
if not default_profile:
|
||||
logger.error(f"[PROFILE-SELECTION] No default profile found for M3U account {m3u_account.id}")
|
||||
return None
|
||||
|
||||
# Check profiles in order: default first, then others
|
||||
profiles = [default_profile] + list(m3u_profiles.filter(is_default=False))
|
||||
|
||||
for profile in profiles:
|
||||
# Check if profile has available connection slots
|
||||
if profile.current_viewers < profile.max_streams or profile.max_streams == 0:
|
||||
return profile
|
||||
profile_connections_key = f"profile_connections:{profile.id}"
|
||||
current_connections = int(redis_client.get(profile_connections_key) or 0)
|
||||
|
||||
# Fallback to default profile even if over limit
|
||||
return profiles.filter(is_default=True).first()
|
||||
# Check if profile has available connection slots
|
||||
if profile.max_streams == 0 or current_connections < profile.max_streams:
|
||||
logger.info(f"[PROFILE-SELECTION] Selected profile {profile.id} ({profile.name}): {current_connections}/{profile.max_streams} connections")
|
||||
return (profile, current_connections)
|
||||
else:
|
||||
logger.debug(f"[PROFILE-SELECTION] Profile {profile.id} at capacity: {current_connections}/{profile.max_streams}")
|
||||
|
||||
# All profiles are at capacity - return None to trigger error response
|
||||
logger.error(f"[PROFILE-SELECTION] All profiles at capacity for M3U account {m3u_account.id}, rejecting request")
|
||||
return None
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Error getting M3U profile: {e}")
|
||||
|
|
|
|||
|
|
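A sketch of the Redis-backed capacity test that drives profile selection, using the key name shown in the hunk (`profile_connections:<profile id>`); `max_streams == 0` is treated as unlimited. The selection order in the hunk (existing session's profile, explicitly requested profile, default profile, then the rest) is unchanged here; the helper below only shows the capacity check itself.

```python
# Sketch of the Redis-backed capacity test behind profile selection.
def profile_connection_count(redis_client, profile_id):
    # redis-py may return bytes; int() accepts them directly.
    return int(redis_client.get(f"profile_connections:{profile_id}") or 0)

def profile_has_capacity(redis_client, profile):
    current = profile_connection_count(redis_client, profile.id)
    # max_streams == 0 means "unlimited" for this profile.
    return (profile.max_streams == 0 or current < profile.max_streams), current
```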
@@ -62,9 +62,9 @@ def refresh_vod_content(account_id):
|
|||
|
||||
logger.info(f"Batch VOD refresh completed for account {account.name} in {duration:.2f} seconds")
|
||||
|
||||
# Cleanup orphaned VOD content after refresh
|
||||
logger.info("Starting cleanup of orphaned VOD content")
|
||||
cleanup_result = cleanup_orphaned_vod_content(scan_start_time=start_time)
|
||||
# Cleanup orphaned VOD content after refresh (scoped to this account only)
|
||||
logger.info(f"Starting cleanup of orphaned VOD content for account {account.name}")
|
||||
cleanup_result = cleanup_orphaned_vod_content(account_id=account_id, scan_start_time=start_time)
|
||||
logger.info(f"VOD cleanup completed: {cleanup_result}")
|
||||
|
||||
# Send completion notification
|
||||
|
|
@@ -303,7 +303,7 @@ def process_movie_batch(account, batch, categories, relations, scan_start_time=N
|
|||
|
||||
# Prepare movie properties
|
||||
description = movie_data.get('description') or movie_data.get('plot') or ''
|
||||
rating = movie_data.get('rating') or movie_data.get('vote_average') or ''
|
||||
rating = normalize_rating(movie_data.get('rating') or movie_data.get('vote_average'))
|
||||
genre = movie_data.get('genre') or movie_data.get('category_name') or ''
|
||||
duration_secs = extract_duration_from_data(movie_data)
|
||||
trailer_raw = movie_data.get('trailer') or movie_data.get('youtube_trailer') or ''
|
||||
|
|
@@ -608,7 +608,7 @@ def process_series_batch(account, batch, categories, relations, scan_start_time=
|
|||
|
||||
# Prepare series properties
|
||||
description = series_data.get('plot', '')
|
||||
rating = series_data.get('rating', '')
|
||||
rating = normalize_rating(series_data.get('rating'))
|
||||
genre = series_data.get('genre', '')
|
||||
logo_url = series_data.get('cover') or ''
|
||||
|
||||
|
|
@@ -896,6 +896,33 @@ def extract_duration_from_data(movie_data):
|
|||
return duration_secs
|
||||
|
||||
|
||||
def normalize_rating(rating_value):
|
||||
"""Normalize rating value by converting commas to decimals and validating as float"""
|
||||
if not rating_value:
|
||||
return None
|
||||
|
||||
try:
|
||||
# Convert to string for processing
|
||||
rating_str = str(rating_value).strip()
|
||||
|
||||
if not rating_str or rating_str == '':
|
||||
return None
|
||||
|
||||
# Replace comma with decimal point (European format)
|
||||
rating_str = rating_str.replace(',', '.')
|
||||
|
||||
# Try to convert to float
|
||||
rating_float = float(rating_str)
|
||||
|
||||
# Return as string to maintain compatibility with existing code
|
||||
# but ensure it's a valid numeric format
|
||||
return str(rating_float)
|
||||
except (ValueError, TypeError, AttributeError):
|
||||
# If conversion fails, discard the rating
|
||||
logger.debug(f"Invalid rating value discarded: {rating_value}")
|
||||
return None
|
||||
|
||||
|
||||
def extract_year(date_string):
|
||||
"""Extract year from date string"""
|
||||
if not date_string:
|
||||
|
|
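A few illustrative checks of how `normalize_rating` behaves, derived from the implementation above (the values are examples, not test fixtures from the repo):

```python
# Illustrative checks of normalize_rating's behaviour.
assert normalize_rating("7,4") == "7.4"   # European comma becomes a decimal point
assert normalize_rating(8) == "8.0"       # numbers pass through as float strings
assert normalize_rating("N/A") is None    # non-numeric ratings are discarded
assert normalize_rating("") is None       # empty input is treated as "no rating"
```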
@@ -1021,9 +1048,9 @@ def refresh_series_episodes(account, series, external_series_id, episodes_data=N
|
|||
if should_update_field(series.description, info.get('plot')):
|
||||
series.description = extract_string_from_array_or_string(info.get('plot'))
|
||||
updated = True
|
||||
if (info.get('rating') and str(info.get('rating')).strip() and
|
||||
(not series.rating or not str(series.rating).strip())):
|
||||
series.rating = info.get('rating')
|
||||
normalized_rating = normalize_rating(info.get('rating'))
|
||||
if normalized_rating and (not series.rating or not str(series.rating).strip()):
|
||||
series.rating = normalized_rating
|
||||
updated = True
|
||||
if should_update_field(series.genre, info.get('genre')):
|
||||
series.genre = extract_string_from_array_or_string(info.get('genre'))
|
||||
|
|
@@ -1124,7 +1151,7 @@ def batch_process_episodes(account, series, episodes_data, scan_start_time=None)
|
|||
|
||||
# Extract episode metadata
|
||||
description = info.get('plot') or info.get('overview', '') if info else ''
|
||||
rating = info.get('rating', '') if info else ''
|
||||
rating = normalize_rating(info.get('rating')) if info else None
|
||||
air_date = extract_date_from_data(info) if info else None
|
||||
duration_secs = info.get('duration_secs') if info else None
|
||||
tmdb_id = info.get('tmdb_id') if info else None
|
||||
|
|
@@ -1308,7 +1335,7 @@ def batch_refresh_series_episodes(account_id, series_ids=None):


 @shared_task
-def cleanup_orphaned_vod_content(stale_days=0, scan_start_time=None):
+def cleanup_orphaned_vod_content(stale_days=0, scan_start_time=None, account_id=None):
     """Clean up VOD content that has no M3U relations or has stale relations"""
     from datetime import timedelta

@@ -1318,30 +1345,44 @@ def cleanup_orphaned_vod_content(stale_days=0, scan_start_time=None):
     # Calculate cutoff date for stale relations
     cutoff_date = reference_time - timedelta(days=stale_days)

+    # Build base query filters
+    base_filters = {'last_seen__lt': cutoff_date}
+    if account_id:
+        base_filters['m3u_account_id'] = account_id
+        logger.info(f"Cleaning up stale VOD content for account {account_id}")
+    else:
+        logger.info("Cleaning up stale VOD content across all accounts")
+
     # Clean up stale movie relations (haven't been seen in the specified days)
-    stale_movie_relations = M3UMovieRelation.objects.filter(last_seen__lt=cutoff_date)
+    stale_movie_relations = M3UMovieRelation.objects.filter(**base_filters)
     stale_movie_count = stale_movie_relations.count()
     stale_movie_relations.delete()

     # Clean up stale series relations
-    stale_series_relations = M3USeriesRelation.objects.filter(last_seen__lt=cutoff_date)
+    stale_series_relations = M3USeriesRelation.objects.filter(**base_filters)
     stale_series_count = stale_series_relations.count()
     stale_series_relations.delete()

     # Clean up stale episode relations
-    stale_episode_relations = M3UEpisodeRelation.objects.filter(last_seen__lt=cutoff_date)
+    stale_episode_relations = M3UEpisodeRelation.objects.filter(**base_filters)
     stale_episode_count = stale_episode_relations.count()
     stale_episode_relations.delete()

-    # Clean up movies with no relations (orphaned)
-    orphaned_movies = Movie.objects.filter(m3u_relations__isnull=True)
-    orphaned_movie_count = orphaned_movies.count()
-    orphaned_movies.delete()
+    # Clean up movies with no relations (orphaned) - only if no account_id specified (global cleanup)
+    if not account_id:
+        orphaned_movies = Movie.objects.filter(m3u_relations__isnull=True)
+        orphaned_movie_count = orphaned_movies.count()
+        orphaned_movies.delete()

-    # Clean up series with no relations (orphaned)
-    orphaned_series = Series.objects.filter(m3u_relations__isnull=True)
-    orphaned_series_count = orphaned_series.count()
-    orphaned_series.delete()
+        # Clean up series with no relations (orphaned) - only if no account_id specified (global cleanup)
+        orphaned_series = Series.objects.filter(m3u_relations__isnull=True)
+        orphaned_series_count = orphaned_series.count()
+        orphaned_series.delete()
+    else:
+        # When cleaning up for specific account, we don't remove orphaned content
+        # as other accounts might still reference it
+        orphaned_movie_count = 0
+        orphaned_series_count = 0

     # Episodes will be cleaned up via CASCADE when series are deleted

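Note: as a rough illustration of how the base_filters dict scopes this cleanup, here is a minimal sketch that applies the same "last seen before cutoff, optionally per account" rule to plain dictionaries instead of Django querysets; the record layout is made up for the example.

# Minimal sketch of the account-scoped staleness rule above, applied to plain dicts.
from datetime import datetime, timedelta

def select_stale_relations(relations, stale_days=0, account_id=None, now=None):
    """Return relations not seen since the cutoff, optionally limited to one account."""
    reference_time = now or datetime.utcnow()
    cutoff_date = reference_time - timedelta(days=stale_days)
    stale = [r for r in relations if r["last_seen"] < cutoff_date]
    if account_id is not None:
        stale = [r for r in stale if r["m3u_account_id"] == account_id]
    return stale


if __name__ == "__main__":
    now = datetime(2024, 1, 10)
    relations = [  # hypothetical relation records, for illustration only
        {"id": 1, "m3u_account_id": 1, "last_seen": datetime(2024, 1, 1)},
        {"id": 2, "m3u_account_id": 2, "last_seen": datetime(2024, 1, 1)},
        {"id": 3, "m3u_account_id": 1, "last_seen": datetime(2024, 1, 10)},
    ]
    # Global cleanup selects both stale relations; account-scoped cleanup only one.
    print([r["id"] for r in select_stale_relations(relations, stale_days=2, now=now)])                 # [1, 2]
    print([r["id"] for r in select_stale_relations(relations, stale_days=2, account_id=1, now=now)])   # [1]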
@@ -1783,8 +1824,9 @@ def refresh_movie_advanced_data(m3u_movie_relation_id, force_refresh=False):
         if info.get('plot') and info.get('plot') != movie.description:
             movie.description = info.get('plot')
             updated = True
-        if info.get('rating') and info.get('rating') != movie.rating:
-            movie.rating = info.get('rating')
+        normalized_rating = normalize_rating(info.get('rating'))
+        if normalized_rating and normalized_rating != movie.rating:
+            movie.rating = normalized_rating
             updated = True
         if info.get('genre') and info.get('genre') != movie.genre:
             movie.genre = info.get('genre')
@@ -8,7 +8,7 @@ def preload_core_settings(apps, schema_editor):
     CoreSettings.objects.create(
         key=slugify("M3U Hash Key"),
         name="M3U Hash Key",
-        value="name,url,tvg_id",
+        value="url",
     )

 class Migration(migrations.Migration):
@@ -1,4 +1,5 @@
 # core/models.py
+from django.conf import settings
 from django.db import models
 from django.utils.text import slugify
 from django.core.exceptions import ValidationError
@@ -158,8 +159,10 @@ DVR_TV_FALLBACK_DIR_KEY = slugify("DVR TV Fallback Dir")
 DVR_TV_FALLBACK_TEMPLATE_KEY = slugify("DVR TV Fallback Template")
 DVR_MOVIE_FALLBACK_TEMPLATE_KEY = slugify("DVR Movie Fallback Template")
 DVR_COMSKIP_ENABLED_KEY = slugify("DVR Comskip Enabled")
+DVR_COMSKIP_CUSTOM_PATH_KEY = slugify("DVR Comskip Custom Path")
 DVR_PRE_OFFSET_MINUTES_KEY = slugify("DVR Pre-Offset Minutes")
 DVR_POST_OFFSET_MINUTES_KEY = slugify("DVR Post-Offset Minutes")
+SYSTEM_TIME_ZONE_KEY = slugify("System Time Zone")


 class CoreSettings(models.Model):
@@ -274,6 +277,27 @@ class CoreSettings(models.Model):
         except cls.DoesNotExist:
             return False

+    @classmethod
+    def get_dvr_comskip_custom_path(cls):
+        """Return configured comskip.ini path or empty string if unset."""
+        try:
+            return cls.objects.get(key=DVR_COMSKIP_CUSTOM_PATH_KEY).value
+        except cls.DoesNotExist:
+            return ""
+
+    @classmethod
+    def set_dvr_comskip_custom_path(cls, path: str | None):
+        """Persist the comskip.ini path setting, normalizing nulls to empty string."""
+        value = (path or "").strip()
+        obj, _ = cls.objects.get_or_create(
+            key=DVR_COMSKIP_CUSTOM_PATH_KEY,
+            defaults={"name": "DVR Comskip Custom Path", "value": value},
+        )
+        if obj.value != value:
+            obj.value = value
+            obj.save(update_fields=["value"])
+        return value
+
     @classmethod
     def get_dvr_pre_offset_minutes(cls):
         """Minutes to start recording before scheduled start (default 0)."""
@@ -302,6 +326,30 @@ class CoreSettings(models.Model):
         except Exception:
             return 0

+    @classmethod
+    def get_system_time_zone(cls):
+        """Return configured system time zone or fall back to Django settings."""
+        try:
+            value = cls.objects.get(key=SYSTEM_TIME_ZONE_KEY).value
+            if value:
+                return value
+        except cls.DoesNotExist:
+            pass
+        return getattr(settings, "TIME_ZONE", "UTC") or "UTC"
+
+    @classmethod
+    def set_system_time_zone(cls, tz_name: str | None):
+        """Persist the desired system time zone identifier."""
+        value = (tz_name or "").strip() or getattr(settings, "TIME_ZONE", "UTC") or "UTC"
+        obj, _ = cls.objects.get_or_create(
+            key=SYSTEM_TIME_ZONE_KEY,
+            defaults={"name": "System Time Zone", "value": value},
+        )
+        if obj.value != value:
+            obj.value = value
+            obj.save(update_fields=["value"])
+        return value
+
     @classmethod
     def get_dvr_series_rules(cls):
         """Return list of series recording rules. Each: {tvg_id, title, mode: 'all'|'new'}"""
@@ -513,7 +513,7 @@ def rehash_streams(keys):
     for obj in batch:
         # Generate new hash
-        new_hash = Stream.generate_hash_key(obj.name, obj.url, obj.tvg_id, keys)
+        new_hash = Stream.generate_hash_key(obj.name, obj.url, obj.tvg_id, keys, m3u_id=obj.m3u_account_id)

         # Check if this hash already exists in our tracking dict or in database
         if new_hash in hash_keys:
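Note: to see why folding the M3U account id into the hash matters (together with the migration above that narrows the default hash key to "url"), here is a hedged sketch of a hash key builder. The real Stream.generate_hash_key in the repository may differ; this only shows that two accounts carrying the same URL no longer collapse to a single hash once m3u_id is part of the input.

# Hedged sketch of a stream hash key builder; not the repository's implementation.
import hashlib

def generate_hash_key_sketch(name, url, tvg_id, keys, m3u_id=None):
    """Hash the selected fields (e.g. keys=['url']) plus an optional account id."""
    field_values = {"name": name, "url": url, "tvg_id": tvg_id}
    parts = [str(field_values.get(key, "")) for key in keys]
    if m3u_id is not None:
        parts.append(str(m3u_id))
    return hashlib.md5("|".join(parts).encode("utf-8")).hexdigest()


if __name__ == "__main__":
    url = "http://example.invalid/stream/1.ts"  # placeholder URL for the example
    a = generate_hash_key_sketch("News", url, "news.tv", ["url"], m3u_id=1)
    b = generate_hash_key_sketch("News", url, "news.tv", ["url"], m3u_id=2)
    print(a != b)  # True: same URL, different accounts, different hashes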
@@ -225,6 +225,10 @@ CELERY_BEAT_SCHEDULE = {
         "task": "core.tasks.scan_and_process_files",  # Direct task call
         "schedule": 20.0,  # Every 20 seconds
     },
+    "maintain-recurring-recordings": {
+        "task": "apps.channels.tasks.maintain_recurring_recordings",
+        "schedule": 3600.0,  # Once an hour ensure recurring schedules stay ahead
+    },
 }

 MEDIA_ROOT = BASE_DIR / "media"
@@ -4,11 +4,15 @@ ARG REPO_NAME=dispatcharr
 ARG BASE_TAG=base

 # --- Build frontend ---
-FROM node:20 AS frontend-builder
+FROM node:24 AS frontend-builder

 WORKDIR /app/frontend
 COPY ./frontend /app/frontend
-RUN corepack enable && corepack prepare yarn@stable --activate && \
-    yarn install && yarn build && \
+# remove any node_modules that may have been copied from the host (x86)
+RUN rm -rf node_modules || true; \
+    npm install --no-audit --progress=false;
+RUN npm run build; \
     rm -rf node_modules .cache

 # --- Redeclare build arguments for the next stage ---
@@ -9,7 +9,6 @@ services:
       - 9191:9191
     volumes:
-      - dispatcharr_data:/data
+      - ./data:/data
     environment:
       - DISPATCHARR_ENV=aio
       - REDIS_HOST=localhost
@@ -53,7 +53,7 @@ services:
     command: >
       bash -c "
        cd /app &&
-       celery -A dispatcharr worker -l info
+       nice -n 5 celery -A dispatcharr worker -l info
       "

   db:
@@ -8,8 +8,8 @@ exec-before = python /app/scripts/wait_for_redis.py
 ; Start Redis first
 attach-daemon = redis-server
 ; Then start other services
-attach-daemon = celery -A dispatcharr worker --autoscale=6,1
-attach-daemon = celery -A dispatcharr beat
+attach-daemon = nice -n 5 celery -A dispatcharr worker --autoscale=6,1
+attach-daemon = nice -n 5 celery -A dispatcharr beat
 attach-daemon = daphne -b 0.0.0.0 -p 8001 dispatcharr.asgi:application
 attach-daemon = cd /app/frontend && npm run dev
@@ -10,8 +10,8 @@ exec-pre = python /app/scripts/wait_for_redis.py
 ; Start Redis first
 attach-daemon = redis-server
 ; Then start other services
-attach-daemon = celery -A dispatcharr worker --autoscale=6,1
-attach-daemon = celery -A dispatcharr beat
+attach-daemon = nice -n 5 celery -A dispatcharr worker --autoscale=6,1
+attach-daemon = nice -n 5 celery -A dispatcharr beat
 attach-daemon = daphne -b 0.0.0.0 -p 8001 dispatcharr.asgi:application
 attach-daemon = cd /app/frontend && npm run dev
@@ -10,8 +10,8 @@ exec-pre = python /app/scripts/wait_for_redis.py
 ; Start Redis first
 attach-daemon = redis-server
 ; Then start other services
-attach-daemon = celery -A dispatcharr worker --autoscale=6,1
-attach-daemon = celery -A dispatcharr beat
+attach-daemon = nice -n 5 celery -A dispatcharr worker --autoscale=6,1
+attach-daemon = nice -n 5 celery -A dispatcharr beat
 attach-daemon = daphne -b 0.0.0.0 -p 8001 dispatcharr.asgi:application

 # Core settings
frontend/package-lock.json (generated, 2229 lines changed; diff suppressed because it is too large)
@@ -1,5 +1,5 @@
 {
-  "name": "vite",
+  "name": "frontend",
   "private": true,
   "version": "0.0.0",
   "type": "module",
@@ -7,7 +7,9 @@
     "dev": "vite --host",
     "build": "vite build",
     "lint": "eslint .",
-    "preview": "vite preview"
+    "preview": "vite preview",
+    "test": "vitest --run",
+    "test:watch": "vitest"
   },
   "dependencies": {
     "@dnd-kit/core": "^6.3.1",
@@ -22,15 +24,15 @@
     "@mantine/hooks": "~8.0.1",
     "@mantine/notifications": "~8.0.1",
     "@tanstack/react-table": "^8.21.2",
-    "allotment": "^1.20.3",
+    "allotment": "^1.20.4",
     "dayjs": "^1.11.13",
     "immer": "^10.1.1",
     "formik": "^2.4.6",
     "hls.js": "^1.5.20",
     "lucide-react": "^0.511.0",
     "mpegts.js": "^1.8.0",
-    "react": "^19.0.0",
-    "react-dom": "^19.0.0",
+    "react": "^19.1.0",
+    "react-dom": "^19.1.0",
     "react-draggable": "^4.4.6",
     "react-pro-sidebar": "^1.1.0",
     "react-router-dom": "^7.3.0",
@@ -44,14 +46,27 @@
   },
   "devDependencies": {
     "@eslint/js": "^9.21.0",
-    "@types/react": "^19.0.10",
-    "@types/react-dom": "^19.0.4",
-    "@vitejs/plugin-react-swc": "^3.8.0",
+    "@swc/core": "npm:@swc/wasm@1.13.20",
+    "@swc/wasm": "^1.13.20",
+    "@testing-library/dom": "^10.4.1",
+    "@testing-library/jest-dom": "^6.8.0",
+    "@testing-library/react": "^16.3.0",
+    "@testing-library/user-event": "^14.6.1",
+    "@types/react": "^19.1.0",
+    "@types/react-dom": "^19.1.0",
+    "@vitejs/plugin-react-swc": "^4.1.0",
     "eslint": "^9.21.0",
     "eslint-plugin-react-hooks": "^5.1.0",
     "eslint-plugin-react-refresh": "^0.4.19",
     "globals": "^15.15.0",
+    "jsdom": "^27.0.0",
     "prettier": "^3.5.3",
-    "vite": "^6.2.0"
+    "vite": "^6.2.0",
+    "vitest": "^3.2.4"
   },
+  "resolutions": {
+    "vite": "7.1.7",
+    "react": "19.1.0",
+    "react-dom": "19.1.0"
+  }
 }
@@ -38,7 +38,6 @@ export const WebsocketProvider = ({ children }) => {
   const updateEPG = useEPGsStore((s) => s.updateEPG);
   const updateEPGProgress = useEPGsStore((s) => s.updateEPGProgress);

-  const playlists = usePlaylistsStore((s) => s.playlists);
   const updatePlaylist = usePlaylistsStore((s) => s.updatePlaylist);
   const applyMediaScanUpdate = useLibraryStore((s) => s.applyScanUpdate);
@@ -288,10 +287,14 @@ export const WebsocketProvider = ({ children }) => {
         // Update the playlist status whenever we receive a status update
         // Not just when progress is 100% or status is pending_setup
         if (parsedEvent.data.status && parsedEvent.data.account) {
-          // Check if playlists is an object with IDs as keys or an array
-          const playlist = Array.isArray(playlists)
-            ? playlists.find((p) => p.id === parsedEvent.data.account)
-            : playlists[parsedEvent.data.account];
+          // Get fresh playlists from store to avoid stale state from React render cycle
+          const currentPlaylists = usePlaylistsStore.getState().playlists;
+          const isArray = Array.isArray(currentPlaylists);
+          const playlist = isArray
+            ? currentPlaylists.find(
+                (p) => p.id === parsedEvent.data.account
+              )
+            : currentPlaylists[parsedEvent.data.account];

           if (playlist) {
             // When we receive a "success" status with 100% progress, this is a completed refresh
@ -314,19 +317,19 @@ export const WebsocketProvider = ({ children }) => {
|
|||
'M3U refresh completed successfully:',
|
||||
updateData
|
||||
);
|
||||
fetchPlaylists(); // Refresh playlists to ensure UI is up-to-date
|
||||
fetchChannelProfiles(); // Ensure channel profiles are updated
|
||||
}
|
||||
|
||||
updatePlaylist(updateData);
|
||||
fetchPlaylists(); // Refresh playlists to ensure UI is up-to-date
|
||||
fetchChannelProfiles(); // Ensure channel profiles are updated
|
||||
} else {
|
||||
// Log when playlist can't be found for debugging purposes
|
||||
console.warn(
|
||||
`Received update for unknown playlist ID: ${parsedEvent.data.account}`,
|
||||
Array.isArray(playlists)
|
||||
? 'playlists is array'
|
||||
: 'playlists is object',
|
||||
Object.keys(playlists).length
|
||||
// Playlist not in store yet - this happens when backend sends websocket
|
||||
// updates immediately after creating the playlist, before the API response
|
||||
// returns. The frontend will receive a 'playlist_created' event shortly
|
||||
// which will trigger a fetchPlaylists() to sync the store.
|
||||
console.log(
|
||||
`Received update for playlist ID ${parsedEvent.data.account} not yet in store. ` +
|
||||
`Waiting for playlist_created event to sync...`
|
||||
);
|
||||
}
|
||||
}
|
||||
|
|
@ -371,6 +374,173 @@ export const WebsocketProvider = ({ children }) => {
|
|||
}
|
||||
break;
|
||||
|
||||
case 'epg_matching_progress': {
|
||||
const progress = parsedEvent.data;
|
||||
const id = 'epg-matching-progress';
|
||||
|
||||
if (progress.stage === 'starting') {
|
||||
notifications.show({
|
||||
id,
|
||||
title: 'EPG Matching in Progress',
|
||||
message: `Starting to match ${progress.total} channels...`,
|
||||
color: 'blue.5',
|
||||
autoClose: false,
|
||||
withCloseButton: false,
|
||||
loading: true,
|
||||
});
|
||||
} else if (progress.stage === 'matching') {
|
||||
let message = `Matched ${progress.matched} of ${progress.total} channels`;
|
||||
if (progress.remaining > 0) {
|
||||
message += ` (${progress.remaining} remaining)`;
|
||||
}
|
||||
if (progress.current_channel) {
|
||||
message += `\nCurrently processing: ${progress.current_channel}`;
|
||||
}
|
||||
|
||||
notifications.update({
|
||||
id,
|
||||
title: 'EPG Matching in Progress',
|
||||
message,
|
||||
color: 'blue.5',
|
||||
autoClose: false,
|
||||
withCloseButton: false,
|
||||
loading: true,
|
||||
});
|
||||
} else if (progress.stage === 'completed') {
|
||||
notifications.update({
|
||||
id,
|
||||
title: 'EPG Matching Complete',
|
||||
message: `Successfully matched ${progress.matched} of ${progress.total} channels (${progress.progress_percent}%)`,
|
||||
color: progress.matched > 0 ? 'green.5' : 'orange',
|
||||
loading: false,
|
||||
autoClose: 6000,
|
||||
});
|
||||
}
|
||||
break;
|
||||
}
|
||||
|
||||
case 'epg_logo_setting_progress': {
|
||||
const progress = parsedEvent.data;
|
||||
const id = 'epg-logo-setting-progress';
|
||||
|
||||
if (progress.status === 'running' && progress.progress === 0) {
|
||||
// Initial message
|
||||
notifications.show({
|
||||
id,
|
||||
title: 'Setting Logos from EPG',
|
||||
message: `Processing ${progress.total} channels...`,
|
||||
color: 'blue.5',
|
||||
autoClose: false,
|
||||
withCloseButton: false,
|
||||
loading: true,
|
||||
});
|
||||
} else if (progress.status === 'running') {
|
||||
// Progress update
|
||||
let message = `Processed ${progress.progress} of ${progress.total} channels`;
|
||||
if (progress.updated_count !== undefined) {
|
||||
message += ` (${progress.updated_count} updated)`;
|
||||
}
|
||||
if (progress.created_logos_count !== undefined) {
|
||||
message += `, created ${progress.created_logos_count} logos`;
|
||||
}
|
||||
|
||||
notifications.update({
|
||||
id,
|
||||
title: 'Setting Logos from EPG',
|
||||
message,
|
||||
color: 'blue.5',
|
||||
autoClose: false,
|
||||
withCloseButton: false,
|
||||
loading: true,
|
||||
});
|
||||
} else if (progress.status === 'completed') {
|
||||
notifications.update({
|
||||
id,
|
||||
title: 'Logo Setting Complete',
|
||||
message: `Successfully updated ${progress.updated_count || 0} channel logos${progress.created_logos_count ? `, created ${progress.created_logos_count} new logos` : ''}`,
|
||||
color: progress.updated_count > 0 ? 'green.5' : 'orange',
|
||||
loading: false,
|
||||
autoClose: 6000,
|
||||
});
|
||||
// Refresh channels data and logos
|
||||
try {
|
||||
await API.requeryChannels();
|
||||
await useChannelsStore.getState().fetchChannels();
|
||||
|
||||
// Get updated channel data and extract logo IDs to load
|
||||
const channels = useChannelsStore.getState().channels;
|
||||
const logoIds = Object.values(channels)
|
||||
.filter((channel) => channel.logo_id)
|
||||
.map((channel) => channel.logo_id);
|
||||
|
||||
// Fetch the specific logos that were just assigned
|
||||
if (logoIds.length > 0) {
|
||||
await useLogosStore.getState().fetchLogosByIds(logoIds);
|
||||
}
|
||||
} catch (e) {
|
||||
console.warn(
|
||||
'Failed to refresh channels after logo setting:',
|
||||
e
|
||||
);
|
||||
}
|
||||
}
|
||||
break;
|
||||
}
|
||||
|
||||
case 'epg_name_setting_progress': {
|
||||
const progress = parsedEvent.data;
|
||||
const id = 'epg-name-setting-progress';
|
||||
|
||||
if (progress.status === 'running' && progress.progress === 0) {
|
||||
// Initial message
|
||||
notifications.show({
|
||||
id,
|
||||
title: 'Setting Names from EPG',
|
||||
message: `Processing ${progress.total} channels...`,
|
||||
color: 'blue.5',
|
||||
autoClose: false,
|
||||
withCloseButton: false,
|
||||
loading: true,
|
||||
});
|
||||
} else if (progress.status === 'running') {
|
||||
// Progress update
|
||||
let message = `Processed ${progress.progress} of ${progress.total} channels`;
|
||||
if (progress.updated_count !== undefined) {
|
||||
message += ` (${progress.updated_count} updated)`;
|
||||
}
|
||||
|
||||
notifications.update({
|
||||
id,
|
||||
title: 'Setting Names from EPG',
|
||||
message,
|
||||
color: 'blue.5',
|
||||
autoClose: false,
|
||||
withCloseButton: false,
|
||||
loading: true,
|
||||
});
|
||||
} else if (progress.status === 'completed') {
|
||||
notifications.update({
|
||||
id,
|
||||
title: 'Name Setting Complete',
|
||||
message: `Successfully updated ${progress.updated_count || 0} channel names from EPG data`,
|
||||
color: progress.updated_count > 0 ? 'green.5' : 'orange',
|
||||
loading: false,
|
||||
autoClose: 6000,
|
||||
});
|
||||
// Refresh channels data
|
||||
try {
|
||||
await API.requeryChannels();
|
||||
await useChannelsStore.getState().fetchChannels();
|
||||
} catch (e) {
|
||||
console.warn(
|
||||
'Failed to refresh channels after name setting:',
|
||||
e
|
||||
);
|
||||
}
|
||||
}
|
||||
break;
|
||||
}
|
||||
|
||||
case 'm3u_profile_test':
|
||||
setProfilePreview(
|
||||
parsedEvent.data.search_preview,
|
||||
|
|
@ -613,6 +783,14 @@ export const WebsocketProvider = ({ children }) => {
|
|||
|
||||
break;
|
||||
|
||||
case 'playlist_created':
|
||||
// Backend signals that a new playlist has been created and we should refresh
|
||||
console.log(
|
||||
'Playlist created event received, refreshing playlists...'
|
||||
);
|
||||
fetchPlaylists();
|
||||
break;
|
||||
|
||||
case 'bulk_channel_creation_progress': {
|
||||
// Handle progress updates with persistent notifications like stream rehash
|
||||
const data = parsedEvent.data;
|
||||
|
|
|
|||
|
|
@ -516,6 +516,75 @@ export default class API {
|
|||
}
|
||||
}
|
||||
|
||||
static async setChannelNamesFromEpg(channelIds) {
|
||||
try {
|
||||
const response = await request(
|
||||
`${host}/api/channels/channels/set-names-from-epg/`,
|
||||
{
|
||||
method: 'POST',
|
||||
body: { channel_ids: channelIds },
|
||||
}
|
||||
);
|
||||
|
||||
notifications.show({
|
||||
title: 'Task Started',
|
||||
message: response.message,
|
||||
color: 'blue',
|
||||
});
|
||||
|
||||
return response;
|
||||
} catch (e) {
|
||||
errorNotification('Failed to start EPG name setting task', e);
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
|
||||
static async setChannelLogosFromEpg(channelIds) {
|
||||
try {
|
||||
const response = await request(
|
||||
`${host}/api/channels/channels/set-logos-from-epg/`,
|
||||
{
|
||||
method: 'POST',
|
||||
body: { channel_ids: channelIds },
|
||||
}
|
||||
);
|
||||
|
||||
notifications.show({
|
||||
title: 'Task Started',
|
||||
message: response.message,
|
||||
color: 'blue',
|
||||
});
|
||||
|
||||
return response;
|
||||
} catch (e) {
|
||||
errorNotification('Failed to start EPG logo setting task', e);
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
|
||||
static async setChannelTvgIdsFromEpg(channelIds) {
|
||||
try {
|
||||
const response = await request(
|
||||
`${host}/api/channels/channels/set-tvg-ids-from-epg/`,
|
||||
{
|
||||
method: 'POST',
|
||||
body: { channel_ids: channelIds },
|
||||
}
|
||||
);
|
||||
|
||||
notifications.show({
|
||||
title: 'Task Started',
|
||||
message: response.message,
|
||||
color: 'blue',
|
||||
});
|
||||
|
||||
return response;
|
||||
} catch (e) {
|
||||
errorNotification('Failed to start EPG TVG-ID setting task', e);
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
|
||||
static async assignChannelNumbers(channelIds, startingNum = 1) {
|
||||
try {
|
||||
const response = await request(`${host}/api/channels/channels/assign/`, {
|
||||
|
|
@ -1437,12 +1506,18 @@ export default class API {
|
|||
}
|
||||
}
|
||||
|
||||
static async matchEpg() {
|
||||
static async matchEpg(channelIds = null) {
|
||||
try {
|
||||
const requestBody = channelIds ? { channel_ids: channelIds } : {};
|
||||
|
||||
const response = await request(
|
||||
`${host}/api/channels/channels/match-epg/`,
|
||||
{
|
||||
method: 'POST',
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
},
|
||||
body: JSON.stringify(requestBody),
|
||||
}
|
||||
);
|
||||
|
||||
|
|
@ -1452,6 +1527,26 @@ export default class API {
|
|||
}
|
||||
}
|
||||
|
||||
static async matchChannelEpg(channelId) {
|
||||
try {
|
||||
const response = await request(
|
||||
`${host}/api/channels/channels/${channelId}/match-epg/`,
|
||||
{
|
||||
method: 'POST',
|
||||
}
|
||||
);
|
||||
|
||||
// Update the channel in the store with the refreshed data if provided
|
||||
if (response.channel) {
|
||||
useChannelsStore.getState().updateChannel(response.channel);
|
||||
}
|
||||
|
||||
return response;
|
||||
} catch (e) {
|
||||
errorNotification('Failed to run EPG auto-match for channel', e);
|
||||
}
|
||||
}
|
||||
|
||||
static async fetchActiveChannelStats() {
|
||||
try {
|
||||
const response = await request(`${host}/proxy/ts/status`);
|
||||
|
|
@ -1801,6 +1896,83 @@ export default class API {
|
|||
}
|
||||
}
|
||||
|
||||
static async updateRecording(id, values) {
|
||||
try {
|
||||
const response = await request(`${host}/api/channels/recordings/${id}/`, {
|
||||
method: 'PATCH',
|
||||
body: values,
|
||||
});
|
||||
useChannelsStore.getState().fetchRecordings();
|
||||
return response;
|
||||
} catch (e) {
|
||||
errorNotification(`Failed to update recording ${id}`, e);
|
||||
}
|
||||
}
|
||||
|
||||
static async getComskipConfig() {
|
||||
try {
|
||||
return await request(`${host}/api/channels/dvr/comskip-config/`);
|
||||
} catch (e) {
|
||||
errorNotification('Failed to retrieve comskip configuration', e);
|
||||
}
|
||||
}
|
||||
|
||||
static async uploadComskipIni(file) {
|
||||
try {
|
||||
const formData = new FormData();
|
||||
formData.append('file', file);
|
||||
return await request(`${host}/api/channels/dvr/comskip-config/`, {
|
||||
method: 'POST',
|
||||
body: formData,
|
||||
});
|
||||
} catch (e) {
|
||||
errorNotification('Failed to upload comskip.ini', e);
|
||||
}
|
||||
}
|
||||
|
||||
static async listRecurringRules() {
|
||||
try {
|
||||
const response = await request(`${host}/api/channels/recurring-rules/`);
|
||||
return response;
|
||||
} catch (e) {
|
||||
errorNotification('Failed to retrieve recurring DVR rules', e);
|
||||
}
|
||||
}
|
||||
|
||||
static async createRecurringRule(payload) {
|
||||
try {
|
||||
const response = await request(`${host}/api/channels/recurring-rules/`, {
|
||||
method: 'POST',
|
||||
body: payload,
|
||||
});
|
||||
return response;
|
||||
} catch (e) {
|
||||
errorNotification('Failed to create recurring DVR rule', e);
|
||||
}
|
||||
}
|
||||
|
||||
static async updateRecurringRule(ruleId, payload) {
|
||||
try {
|
||||
const response = await request(`${host}/api/channels/recurring-rules/${ruleId}/`, {
|
||||
method: 'PATCH',
|
||||
body: payload,
|
||||
});
|
||||
return response;
|
||||
} catch (e) {
|
||||
errorNotification(`Failed to update recurring rule ${ruleId}`, e);
|
||||
}
|
||||
}
|
||||
|
||||
static async deleteRecurringRule(ruleId) {
|
||||
try {
|
||||
await request(`${host}/api/channels/recurring-rules/${ruleId}/`, {
|
||||
method: 'DELETE',
|
||||
});
|
||||
} catch (e) {
|
||||
errorNotification(`Failed to delete recurring rule ${ruleId}`, e);
|
||||
}
|
||||
}
|
||||
|
||||
static async deleteRecording(id) {
|
||||
try {
|
||||
await request(`${host}/api/channels/recordings/${id}/`, { method: 'DELETE' });
|
||||
|
|
|
|||
|
|
@ -9,7 +9,9 @@ import ChannelGroupForm from './ChannelGroup';
|
|||
import usePlaylistsStore from '../../store/playlists';
|
||||
import logo from '../../images/logo.png';
|
||||
import { useChannelLogoSelection } from '../../hooks/useSmartLogos';
|
||||
import useLogosStore from '../../store/logos';
|
||||
import LazyLogo from '../LazyLogo';
|
||||
import LogoForm from './Logo';
|
||||
import {
|
||||
Box,
|
||||
Button,
|
||||
|
|
@ -34,9 +36,9 @@ import {
|
|||
UnstyledButton,
|
||||
} from '@mantine/core';
|
||||
import { notifications } from '@mantine/notifications';
|
||||
import { ListOrdered, SquarePlus, SquareX, X } from 'lucide-react';
|
||||
import { ListOrdered, SquarePlus, SquareX, X, Zap } from 'lucide-react';
|
||||
import useEPGsStore from '../../store/epgs';
|
||||
import { Dropzone } from '@mantine/dropzone';
|
||||
|
||||
import { FixedSizeList as List } from 'react-window';
|
||||
import { USER_LEVELS, USER_LEVEL_LABELS } from '../../constants';
|
||||
|
||||
|
|
@ -51,11 +53,14 @@ const ChannelForm = ({ channel = null, isOpen, onClose }) => {
|
|||
const canEditChannelGroup = useChannelsStore((s) => s.canEditChannelGroup);
|
||||
|
||||
const {
|
||||
logos,
|
||||
logos: channelLogos,
|
||||
ensureLogosLoaded,
|
||||
isLoading: logosLoading,
|
||||
} = useChannelLogoSelection();
|
||||
|
||||
// Import the full logos store for duplicate checking
|
||||
const allLogos = useLogosStore((s) => s.logos);
|
||||
|
||||
// Ensure logos are loaded when component mounts
|
||||
useEffect(() => {
|
||||
ensureLogosLoaded();
|
||||
|
|
@ -67,7 +72,7 @@ const ChannelForm = ({ channel = null, isOpen, onClose }) => {
|
|||
const tvgs = useEPGsStore((s) => s.tvgs);
|
||||
const tvgsById = useEPGsStore((s) => s.tvgsById);
|
||||
|
||||
const [logoPreview, setLogoPreview] = useState(null);
|
||||
const [logoModalOpen, setLogoModalOpen] = useState(false);
|
||||
const [channelStreams, setChannelStreams] = useState([]);
|
||||
const [channelGroupModelOpen, setChannelGroupModalOpen] = useState(false);
|
||||
const [epgPopoverOpened, setEpgPopoverOpened] = useState(false);
|
||||
|
|
@ -78,6 +83,7 @@ const ChannelForm = ({ channel = null, isOpen, onClose }) => {
|
|||
|
||||
const [groupPopoverOpened, setGroupPopoverOpened] = useState(false);
|
||||
const [groupFilter, setGroupFilter] = useState('');
|
||||
const [autoMatchLoading, setAutoMatchLoading] = useState(false);
|
||||
const groupOptions = Object.values(channelGroups);
|
||||
|
||||
const addStream = (stream) => {
|
||||
|
|
@ -92,32 +98,196 @@ const ChannelForm = ({ channel = null, isOpen, onClose }) => {
|
|||
setChannelStreams(Array.from(streamSet));
|
||||
};
|
||||
|
||||
const handleLogoChange = async (files) => {
|
||||
if (files.length === 1) {
|
||||
const file = files[0];
|
||||
const handleLogoSuccess = ({ logo }) => {
|
||||
if (logo && logo.id) {
|
||||
formik.setFieldValue('logo_id', logo.id);
|
||||
ensureLogosLoaded(); // Refresh logos
|
||||
}
|
||||
setLogoModalOpen(false);
|
||||
};
|
||||
|
||||
const handleAutoMatchEpg = async () => {
|
||||
// Only attempt auto-match for existing channels (editing mode)
|
||||
if (!channel || !channel.id) {
|
||||
notifications.show({
|
||||
title: 'Info',
|
||||
message: 'Auto-match is only available when editing existing channels.',
|
||||
color: 'blue',
|
||||
});
|
||||
return;
|
||||
}
|
||||
|
||||
setAutoMatchLoading(true);
|
||||
try {
|
||||
const response = await API.matchChannelEpg(channel.id);
|
||||
|
||||
if (response.matched) {
|
||||
// Update the form with the new EPG data
|
||||
if (response.channel && response.channel.epg_data_id) {
|
||||
formik.setFieldValue('epg_data_id', response.channel.epg_data_id);
|
||||
}
|
||||
|
||||
// Validate file size on frontend first
|
||||
if (file.size > 5 * 1024 * 1024) {
|
||||
// 5MB
|
||||
notifications.show({
|
||||
title: 'Error',
|
||||
message: 'File too large. Maximum size is 5MB.',
|
||||
color: 'red',
|
||||
title: 'Success',
|
||||
message: response.message,
|
||||
color: 'green',
|
||||
});
|
||||
} else {
|
||||
notifications.show({
|
||||
title: 'No Match Found',
|
||||
message: response.message,
|
||||
color: 'orange',
|
||||
});
|
||||
return;
|
||||
}
|
||||
} catch (error) {
|
||||
notifications.show({
|
||||
title: 'Error',
|
||||
message: 'Failed to auto-match EPG data',
|
||||
color: 'red',
|
||||
});
|
||||
console.error('Auto-match error:', error);
|
||||
} finally {
|
||||
setAutoMatchLoading(false);
|
||||
}
|
||||
};
|
||||
|
||||
try {
|
||||
const retval = await API.uploadLogo(file);
|
||||
// Note: API.uploadLogo already adds the logo to the store, no need to fetch
|
||||
setLogoPreview(retval.cache_url);
|
||||
formik.setFieldValue('logo_id', retval.id);
|
||||
} catch (error) {
|
||||
console.error('Logo upload failed:', error);
|
||||
// Error notification is already handled in API.uploadLogo
|
||||
}
|
||||
const handleSetNameFromEpg = () => {
|
||||
const epgDataId = formik.values.epg_data_id;
|
||||
if (!epgDataId) {
|
||||
notifications.show({
|
||||
title: 'No EPG Selected',
|
||||
message: 'Please select an EPG source first.',
|
||||
color: 'orange',
|
||||
});
|
||||
return;
|
||||
}
|
||||
|
||||
const tvg = tvgsById[epgDataId];
|
||||
if (tvg && tvg.name) {
|
||||
formik.setFieldValue('name', tvg.name);
|
||||
notifications.show({
|
||||
title: 'Success',
|
||||
message: `Channel name set to "${tvg.name}"`,
|
||||
color: 'green',
|
||||
});
|
||||
} else {
|
||||
setLogoPreview(null);
|
||||
notifications.show({
|
||||
title: 'No Name Available',
|
||||
message: 'No name found in the selected EPG data.',
|
||||
color: 'orange',
|
||||
});
|
||||
}
|
||||
};
|
||||
|
||||
const handleSetLogoFromEpg = async () => {
|
||||
const epgDataId = formik.values.epg_data_id;
|
||||
if (!epgDataId) {
|
||||
notifications.show({
|
||||
title: 'No EPG Selected',
|
||||
message: 'Please select an EPG source first.',
|
||||
color: 'orange',
|
||||
});
|
||||
return;
|
||||
}
|
||||
|
||||
const tvg = tvgsById[epgDataId];
|
||||
if (!tvg || !tvg.icon_url) {
|
||||
notifications.show({
|
||||
title: 'No EPG Icon',
|
||||
message: 'EPG data does not have an icon URL.',
|
||||
color: 'orange',
|
||||
});
|
||||
return;
|
||||
}
|
||||
|
||||
try {
|
||||
// Try to find a logo that matches the EPG icon URL - check ALL logos to avoid duplicates
|
||||
let matchingLogo = Object.values(allLogos).find(
|
||||
(logo) => logo.url === tvg.icon_url
|
||||
);
|
||||
|
||||
if (matchingLogo) {
|
||||
formik.setFieldValue('logo_id', matchingLogo.id);
|
||||
notifications.show({
|
||||
title: 'Success',
|
||||
message: `Logo set to "${matchingLogo.name}"`,
|
||||
color: 'green',
|
||||
});
|
||||
} else {
|
||||
// Logo doesn't exist - create it
|
||||
notifications.show({
|
||||
id: 'creating-logo',
|
||||
title: 'Creating Logo',
|
||||
message: `Creating new logo from EPG icon URL...`,
|
||||
loading: true,
|
||||
});
|
||||
|
||||
try {
|
||||
const newLogoData = {
|
||||
name: tvg.name || `Logo for ${tvg.icon_url}`,
|
||||
url: tvg.icon_url,
|
||||
};
|
||||
|
||||
// Create logo by calling the Logo API directly
|
||||
const newLogo = await API.createLogo(newLogoData);
|
||||
|
||||
formik.setFieldValue('logo_id', newLogo.id);
|
||||
|
||||
notifications.update({
|
||||
id: 'creating-logo',
|
||||
title: 'Success',
|
||||
message: `Created and assigned new logo "${newLogo.name}"`,
|
||||
loading: false,
|
||||
color: 'green',
|
||||
autoClose: 5000,
|
||||
});
|
||||
} catch (createError) {
|
||||
notifications.update({
|
||||
id: 'creating-logo',
|
||||
title: 'Error',
|
||||
message: 'Failed to create logo from EPG icon URL',
|
||||
loading: false,
|
||||
color: 'red',
|
||||
autoClose: 5000,
|
||||
});
|
||||
throw createError;
|
||||
}
|
||||
}
|
||||
} catch (error) {
|
||||
notifications.show({
|
||||
title: 'Error',
|
||||
message: 'Failed to set logo from EPG data',
|
||||
color: 'red',
|
||||
});
|
||||
console.error('Set logo from EPG error:', error);
|
||||
}
|
||||
};
|
||||
|
||||
const handleSetTvgIdFromEpg = () => {
|
||||
const epgDataId = formik.values.epg_data_id;
|
||||
if (!epgDataId) {
|
||||
notifications.show({
|
||||
title: 'No EPG Selected',
|
||||
message: 'Please select an EPG source first.',
|
||||
color: 'orange',
|
||||
});
|
||||
return;
|
||||
}
|
||||
|
||||
const tvg = tvgsById[epgDataId];
|
||||
if (tvg && tvg.tvg_id) {
|
||||
formik.setFieldValue('tvg_id', tvg.tvg_id);
|
||||
notifications.show({
|
||||
title: 'Success',
|
||||
message: `TVG-ID set to "${tvg.tvg_id}"`,
|
||||
color: 'green',
|
||||
});
|
||||
} else {
|
||||
notifications.show({
|
||||
title: 'No TVG-ID Available',
|
||||
message: 'No TVG-ID found in the selected EPG data.',
|
||||
color: 'orange',
|
||||
});
|
||||
}
|
||||
};
|
||||
|
||||
|
|
@ -248,9 +418,11 @@ const ChannelForm = ({ channel = null, isOpen, onClose }) => {
|
|||
|
||||
// Memoize logo options to prevent infinite re-renders during background loading
|
||||
const logoOptions = useMemo(() => {
|
||||
const options = [{ id: '0', name: 'Default' }].concat(Object.values(logos));
|
||||
const options = [{ id: '0', name: 'Default' }].concat(
|
||||
Object.values(channelLogos)
|
||||
);
|
||||
return options;
|
||||
}, [logos]); // Only depend on logos object
|
||||
}, [channelLogos]); // Only depend on channelLogos object
|
||||
|
||||
// Update the handler for when channel group modal is closed
|
||||
const handleChannelGroupModalClose = (newGroup) => {
|
||||
|
|
@ -306,11 +478,28 @@ const ChannelForm = ({ channel = null, isOpen, onClose }) => {
|
|||
<TextInput
|
||||
id="name"
|
||||
name="name"
|
||||
label="Channel Name"
|
||||
label={
|
||||
<Group gap="xs">
|
||||
<span>Channel Name</span>
|
||||
{formik.values.epg_data_id && (
|
||||
<Button
|
||||
size="xs"
|
||||
variant="transparent"
|
||||
onClick={handleSetNameFromEpg}
|
||||
title="Set channel name from EPG data"
|
||||
p={0}
|
||||
h="auto"
|
||||
>
|
||||
Use EPG Name
|
||||
</Button>
|
||||
)}
|
||||
</Group>
|
||||
}
|
||||
value={formik.values.name}
|
||||
onChange={formik.handleChange}
|
||||
error={formik.errors.name ? formik.touched.name : ''}
|
||||
size="xs"
|
||||
style={{ flex: 1 }}
|
||||
/>
|
||||
|
||||
<Flex gap="sm">
|
||||
|
|
@ -492,9 +681,27 @@ const ChannelForm = ({ channel = null, isOpen, onClose }) => {
|
|||
<TextInput
|
||||
id="logo_id"
|
||||
name="logo_id"
|
||||
label="Logo"
|
||||
label={
|
||||
<Group gap="xs">
|
||||
<span>Logo</span>
|
||||
{formik.values.epg_data_id && (
|
||||
<Button
|
||||
size="xs"
|
||||
variant="transparent"
|
||||
onClick={handleSetLogoFromEpg}
|
||||
title="Find matching logo based on EPG icon URL"
|
||||
p={0}
|
||||
h="auto"
|
||||
>
|
||||
Use EPG Logo
|
||||
</Button>
|
||||
)}
|
||||
</Group>
|
||||
}
|
||||
readOnly
|
||||
value={logos[formik.values.logo_id]?.name || 'Default'}
|
||||
value={
|
||||
channelLogos[formik.values.logo_id]?.name || 'Default'
|
||||
}
|
||||
onClick={() => {
|
||||
console.log(
|
||||
'Logo input clicked, setting popover opened to true'
|
||||
|
|
@ -601,42 +808,22 @@ const ChannelForm = ({ channel = null, isOpen, onClose }) => {
|
|||
</Popover.Dropdown>
|
||||
</Popover>
|
||||
|
||||
<LazyLogo
|
||||
logoId={formik.values.logo_id}
|
||||
alt="channel logo"
|
||||
style={{ height: 40 }}
|
||||
/>
|
||||
<Stack gap="xs" align="center">
|
||||
<LazyLogo
|
||||
logoId={formik.values.logo_id}
|
||||
alt="channel logo"
|
||||
style={{ height: 40 }}
|
||||
/>
|
||||
</Stack>
|
||||
</Group>
|
||||
|
||||
<Group>
|
||||
<Divider size="xs" style={{ flex: 1 }} />
|
||||
<Text size="xs" c="dimmed">
|
||||
OR
|
||||
</Text>
|
||||
<Divider size="xs" style={{ flex: 1 }} />
|
||||
</Group>
|
||||
|
||||
<Stack>
|
||||
<Text size="sm">Upload Logo</Text>
|
||||
<Dropzone
|
||||
onDrop={handleLogoChange}
|
||||
onReject={(files) => console.log('rejected files', files)}
|
||||
maxSize={5 * 1024 ** 2}
|
||||
>
|
||||
<Group
|
||||
justify="center"
|
||||
gap="xl"
|
||||
mih={40}
|
||||
style={{ pointerEvents: 'none' }}
|
||||
>
|
||||
<Text size="sm" inline>
|
||||
Drag images here or click to select files
|
||||
</Text>
|
||||
</Group>
|
||||
</Dropzone>
|
||||
|
||||
<Center></Center>
|
||||
</Stack>
|
||||
<Button
|
||||
onClick={() => setLogoModalOpen(true)}
|
||||
fullWidth
|
||||
variant="default"
|
||||
>
|
||||
Upload or Create Logo
|
||||
</Button>
|
||||
</Stack>
|
||||
|
||||
<Divider size="sm" orientation="vertical" />
|
||||
|
|
@ -664,7 +851,23 @@ const ChannelForm = ({ channel = null, isOpen, onClose }) => {
|
|||
<TextInput
|
||||
id="tvg_id"
|
||||
name="tvg_id"
|
||||
label="TVG-ID"
|
||||
label={
|
||||
<Group gap="xs">
|
||||
<span>TVG-ID</span>
|
||||
{formik.values.epg_data_id && (
|
||||
<Button
|
||||
size="xs"
|
||||
variant="transparent"
|
||||
onClick={handleSetTvgIdFromEpg}
|
||||
title="Set TVG-ID from EPG data"
|
||||
p={0}
|
||||
h="auto"
|
||||
>
|
||||
Use EPG TVG-ID
|
||||
</Button>
|
||||
)}
|
||||
</Group>
|
||||
}
|
||||
value={formik.values.tvg_id}
|
||||
onChange={formik.handleChange}
|
||||
error={formik.errors.tvg_id ? formik.touched.tvg_id : ''}
|
||||
|
|
@ -707,6 +910,25 @@ const ChannelForm = ({ channel = null, isOpen, onClose }) => {
|
|||
>
|
||||
Use Dummy
|
||||
</Button>
|
||||
<Button
|
||||
size="xs"
|
||||
variant="transparent"
|
||||
color="blue"
|
||||
onClick={(e) => {
|
||||
e.stopPropagation();
|
||||
handleAutoMatchEpg();
|
||||
}}
|
||||
disabled={!channel || !channel.id}
|
||||
loading={autoMatchLoading}
|
||||
title={
|
||||
!channel || !channel.id
|
||||
? 'Auto-match is only available for existing channels'
|
||||
: 'Automatically match EPG data'
|
||||
}
|
||||
leftSection={<Zap size="14" />}
|
||||
>
|
||||
Auto Match
|
||||
</Button>
|
||||
</Group>
|
||||
}
|
||||
readOnly
|
||||
|
|
@ -767,6 +989,7 @@ const ChannelForm = ({ channel = null, isOpen, onClose }) => {
|
|||
}
|
||||
mb="xs"
|
||||
size="xs"
|
||||
autoFocus
|
||||
/>
|
||||
</Group>
|
||||
|
||||
|
|
@ -836,6 +1059,12 @@ const ChannelForm = ({ channel = null, isOpen, onClose }) => {
|
|||
isOpen={channelGroupModelOpen}
|
||||
onClose={handleChannelGroupModalClose}
|
||||
/>
|
||||
|
||||
<LogoForm
|
||||
isOpen={logoModalOpen}
|
||||
onClose={() => setLogoModalOpen(false)}
|
||||
onSuccess={handleLogoSuccess}
|
||||
/>
|
||||
</>
|
||||
);
|
||||
};
|
||||
|
|
|
|||
|
|
@ -27,20 +27,36 @@ import {
|
|||
import { ListOrdered, SquarePlus, SquareX, X } from 'lucide-react';
|
||||
import { FixedSizeList as List } from 'react-window';
|
||||
import { useForm } from '@mantine/form';
|
||||
import { notifications } from '@mantine/notifications';
|
||||
import { USER_LEVELS, USER_LEVEL_LABELS } from '../../constants';
|
||||
import { useChannelLogoSelection } from '../../hooks/useSmartLogos';
|
||||
import LazyLogo from '../LazyLogo';
|
||||
import logo from '../../images/logo.png';
|
||||
import ConfirmationDialog from '../ConfirmationDialog';
|
||||
import useWarningsStore from '../../store/warnings';
|
||||
|
||||
const ChannelBatchForm = ({ channelIds, isOpen, onClose }) => {
|
||||
const theme = useMantineTheme();
|
||||
|
||||
const groupListRef = useRef(null);
|
||||
const logoListRef = useRef(null);
|
||||
|
||||
const channelGroups = useChannelsStore((s) => s.channelGroups);
|
||||
const canEditChannelGroup = useChannelsStore((s) => s.canEditChannelGroup);
|
||||
const {
|
||||
logos: channelLogos,
|
||||
ensureLogosLoaded,
|
||||
isLoading: logosLoading,
|
||||
} = useChannelLogoSelection();
|
||||
|
||||
useEffect(() => {
|
||||
ensureLogosLoaded();
|
||||
}, [ensureLogosLoaded]);
|
||||
|
||||
const streamProfiles = useStreamProfilesStore((s) => s.profiles);
|
||||
|
||||
const [channelGroupModelOpen, setChannelGroupModalOpen] = useState(false);
|
||||
const [selectedChannelGroup, setSelectedChannelGroup] = useState('-1');
|
||||
const [selectedLogoId, setSelectedLogoId] = useState('-1');
|
||||
const [isSubmitting, setIsSubmitting] = useState(false);
|
||||
const [regexFind, setRegexFind] = useState('');
|
||||
const [regexReplace, setRegexReplace] = useState('');
|
||||
|
|
@ -49,10 +65,21 @@ const ChannelBatchForm = ({ channelIds, isOpen, onClose }) => {
|
|||
const [groupFilter, setGroupFilter] = useState('');
|
||||
const groupOptions = Object.values(channelGroups);
|
||||
|
||||
const [logoPopoverOpened, setLogoPopoverOpened] = useState(false);
|
||||
const [logoFilter, setLogoFilter] = useState('');
|
||||
// Confirmation dialog states
|
||||
const [confirmSetNamesOpen, setConfirmSetNamesOpen] = useState(false);
|
||||
const [confirmSetLogosOpen, setConfirmSetLogosOpen] = useState(false);
|
||||
const [confirmSetTvgIdsOpen, setConfirmSetTvgIdsOpen] = useState(false);
|
||||
const [confirmClearEpgsOpen, setConfirmClearEpgsOpen] = useState(false);
|
||||
const isWarningSuppressed = useWarningsStore((s) => s.isWarningSuppressed);
|
||||
const suppressWarning = useWarningsStore((s) => s.suppressWarning);
|
||||
|
||||
const form = useForm({
|
||||
mode: 'uncontrolled',
|
||||
initialValues: {
|
||||
channel_group: '(no change)',
|
||||
logo: '(no change)',
|
||||
stream_profile_id: '-1',
|
||||
user_level: '-1',
|
||||
},
|
||||
|
|
@ -70,6 +97,15 @@ const ChannelBatchForm = ({ channelIds, isOpen, onClose }) => {
|
|||
delete values.channel_group_id;
|
||||
}
|
||||
|
||||
if (selectedLogoId && selectedLogoId !== '-1') {
|
||||
if (selectedLogoId === '0') {
|
||||
values.logo_id = null;
|
||||
} else {
|
||||
values.logo_id = parseInt(selectedLogoId);
|
||||
}
|
||||
}
|
||||
delete values.logo;
|
||||
|
||||
// Handle stream profile ID - convert special values
|
||||
if (!values.stream_profile_id || values.stream_profile_id === '-1') {
|
||||
delete values.stream_profile_id;
|
||||
|
|
@ -134,6 +170,184 @@ const ChannelBatchForm = ({ channelIds, isOpen, onClose }) => {
|
|||
}
|
||||
};
|
||||
|
||||
const handleSetNamesFromEpg = async () => {
|
||||
if (!channelIds || channelIds.length === 0) {
|
||||
notifications.show({
|
||||
title: 'No Channels Selected',
|
||||
message: 'No channels to update.',
|
||||
color: 'orange',
|
||||
});
|
||||
return;
|
||||
}
|
||||
|
||||
// Skip warning if suppressed
|
||||
if (isWarningSuppressed('batch-set-names-from-epg')) {
|
||||
return executeSetNamesFromEpg();
|
||||
}
|
||||
|
||||
setConfirmSetNamesOpen(true);
|
||||
};
|
||||
|
||||
const executeSetNamesFromEpg = async () => {
|
||||
try {
|
||||
// Start the backend task
|
||||
await API.setChannelNamesFromEpg(channelIds);
|
||||
|
||||
// The task will send WebSocket updates for progress
|
||||
// Just show that it started successfully
|
||||
notifications.show({
|
||||
title: 'Task Started',
|
||||
message: `Started setting names from EPG for ${channelIds.length} channels. Progress will be shown in notifications.`,
|
||||
color: 'blue',
|
||||
});
|
||||
|
||||
// Close the modal since the task is now running in background
|
||||
setConfirmSetNamesOpen(false);
|
||||
onClose();
|
||||
} catch (error) {
|
||||
console.error('Failed to start EPG name setting task:', error);
|
||||
notifications.show({
|
||||
title: 'Error',
|
||||
message: 'Failed to start EPG name setting task.',
|
||||
color: 'red',
|
||||
});
|
||||
setConfirmSetNamesOpen(false);
|
||||
}
|
||||
};
|
||||
|
||||
const handleSetLogosFromEpg = async () => {
|
||||
if (!channelIds || channelIds.length === 0) {
|
||||
notifications.show({
|
||||
title: 'No Channels Selected',
|
||||
message: 'No channels to update.',
|
||||
color: 'orange',
|
||||
});
|
||||
return;
|
||||
}
|
||||
|
||||
// Skip warning if suppressed
|
||||
if (isWarningSuppressed('batch-set-logos-from-epg')) {
|
||||
return executeSetLogosFromEpg();
|
||||
}
|
||||
|
||||
setConfirmSetLogosOpen(true);
|
||||
};
|
||||
|
||||
const executeSetLogosFromEpg = async () => {
|
||||
try {
|
||||
// Start the backend task
|
||||
await API.setChannelLogosFromEpg(channelIds);
|
||||
|
||||
// The task will send WebSocket updates for progress
|
||||
// Just show that it started successfully
|
||||
notifications.show({
|
||||
title: 'Task Started',
|
||||
message: `Started setting logos from EPG for ${channelIds.length} channels. Progress will be shown in notifications.`,
|
||||
color: 'blue',
|
||||
});
|
||||
|
||||
// Close the modal since the task is now running in background
|
||||
setConfirmSetLogosOpen(false);
|
||||
onClose();
|
||||
} catch (error) {
|
||||
console.error('Failed to start EPG logo setting task:', error);
|
||||
notifications.show({
|
||||
title: 'Error',
|
||||
message: 'Failed to start EPG logo setting task.',
|
||||
color: 'red',
|
||||
});
|
||||
setConfirmSetLogosOpen(false);
|
||||
}
|
||||
};
|
||||
|
||||
const handleSetTvgIdsFromEpg = async () => {
|
||||
if (!channelIds || channelIds.length === 0) {
|
||||
notifications.show({
|
||||
title: 'No Channels Selected',
|
||||
message: 'No channels to update.',
|
||||
color: 'orange',
|
||||
});
|
||||
return;
|
||||
}
|
||||
|
||||
// Skip warning if suppressed
|
||||
if (isWarningSuppressed('batch-set-tvg-ids-from-epg')) {
|
||||
return executeSetTvgIdsFromEpg();
|
||||
}
|
||||
|
||||
setConfirmSetTvgIdsOpen(true);
|
||||
};
|
||||
|
||||
const executeSetTvgIdsFromEpg = async () => {
|
||||
try {
|
||||
// Start the backend task
|
||||
await API.setChannelTvgIdsFromEpg(channelIds);
|
||||
|
||||
// The task will send WebSocket updates for progress
|
||||
// Just show that it started successfully
|
||||
notifications.show({
|
||||
title: 'Task Started',
|
||||
message: `Started setting TVG-IDs from EPG for ${channelIds.length} channels. Progress will be shown in notifications.`,
|
||||
color: 'blue',
|
||||
});
|
||||
|
||||
// Close the modal since the task is now running in background
|
||||
setConfirmSetTvgIdsOpen(false);
|
||||
onClose();
|
||||
} catch (error) {
|
||||
console.error('Failed to start EPG TVG-ID setting task:', error);
|
||||
notifications.show({
|
||||
title: 'Error',
|
||||
message: 'Failed to start EPG TVG-ID setting task.',
|
||||
color: 'red',
|
||||
});
|
||||
setConfirmSetTvgIdsOpen(false);
|
||||
}
|
||||
};
|
||||
|
||||
const handleClearEpgs = async () => {
|
||||
if (!channelIds || channelIds.length === 0) {
|
||||
notifications.show({
|
||||
title: 'No Channels Selected',
|
||||
message: 'No channels to update.',
|
||||
color: 'orange',
|
||||
});
|
||||
return;
|
||||
}
|
||||
|
||||
// Skip warning if suppressed
|
||||
if (isWarningSuppressed('batch-clear-epgs')) {
|
||||
return executeClearEpgs();
|
||||
}
|
||||
|
||||
setConfirmClearEpgsOpen(true);
|
||||
};
|
||||
|
||||
const executeClearEpgs = async () => {
|
||||
try {
|
||||
// Clear EPG assignments (set to null/dummy) using existing batchSetEPG API
|
||||
const associations = channelIds.map((id) => ({
|
||||
channel_id: id,
|
||||
epg_data_id: null,
|
||||
}));
|
||||
|
||||
await API.batchSetEPG(associations);
|
||||
|
||||
// batchSetEPG already shows a notification and refreshes channels
|
||||
// Close the modal
|
||||
setConfirmClearEpgsOpen(false);
|
||||
onClose();
|
||||
} catch (error) {
|
||||
console.error('Failed to clear EPG assignments:', error);
|
||||
notifications.show({
|
||||
title: 'Error',
|
||||
message: 'Failed to clear EPG assignments.',
|
||||
color: 'red',
|
||||
});
|
||||
setConfirmClearEpgsOpen(false);
|
||||
}
|
||||
};
|
||||
|
||||
// useEffect(() => {
|
||||
// // const sameStreamProfile = channels.every(
|
||||
// // (channel) => channel.stream_profile_id == channels[0].stream_profile_id
|
||||
|
|
@ -174,6 +388,18 @@ const ChannelBatchForm = ({ channelIds, isOpen, onClose }) => {
|
|||
),
|
||||
];
|
||||
|
||||
const logoOptions = useMemo(() => {
|
||||
return [
|
||||
{ id: '-1', name: '(no change)' },
|
||||
{ id: '0', name: 'Use Default', isDefault: true },
|
||||
...Object.values(channelLogos),
|
||||
];
|
||||
}, [channelLogos]);
|
||||
|
||||
const filteredLogos = logoOptions.filter((logo) =>
|
||||
logo.name.toLowerCase().includes(logoFilter.toLowerCase())
|
||||
);
|
||||
|
||||
if (!isOpen) {
|
||||
return <></>;
|
||||
}
|
||||
|
|
@ -183,7 +409,7 @@ const ChannelBatchForm = ({ channelIds, isOpen, onClose }) => {
|
|||
<Modal
|
||||
opened={isOpen}
|
||||
onClose={onClose}
|
||||
size={"lg"}
|
||||
size={'lg'}
|
||||
title={
|
||||
<Group gap="5">
|
||||
<ListOrdered size="20" />
|
||||
|
|
@ -197,7 +423,9 @@ const ChannelBatchForm = ({ channelIds, isOpen, onClose }) => {
|
|||
<Stack gap="5" style={{ flex: 1 }}>
|
||||
<Paper withBorder p="xs" radius="md">
|
||||
<Group justify="space-between" align="center" mb={6}>
|
||||
<Text size="sm" fw={600}>Channel Name</Text>
|
||||
<Text size="sm" fw={600}>
|
||||
Channel Name
|
||||
</Text>
|
||||
</Group>
|
||||
<Group align="end" gap="xs" wrap="nowrap">
|
||||
<TextInput
|
||||
|
|
@ -224,6 +452,55 @@ const ChannelBatchForm = ({ channelIds, isOpen, onClose }) => {
|
|||
/>
|
||||
</Paper>
|
||||
|
||||
<Paper withBorder p="xs" radius="md">
|
||||
<Group justify="space-between" align="center" mb={6}>
|
||||
<Text size="sm" fw={600}>
|
||||
EPG Operations
|
||||
</Text>
|
||||
</Group>
|
||||
<Group gap="xs" wrap="nowrap">
|
||||
<Button
|
||||
size="xs"
|
||||
variant="light"
|
||||
onClick={handleSetNamesFromEpg}
|
||||
style={{ flex: 1 }}
|
||||
>
|
||||
Set Names from EPG
|
||||
</Button>
|
||||
<Button
|
||||
size="xs"
|
||||
variant="light"
|
||||
onClick={handleSetLogosFromEpg}
|
||||
style={{ flex: 1 }}
|
||||
>
|
||||
Set Logos from EPG
|
||||
</Button>
|
||||
<Button
|
||||
size="xs"
|
||||
variant="light"
|
||||
onClick={handleSetTvgIdsFromEpg}
|
||||
style={{ flex: 1 }}
|
||||
>
|
||||
Set TVG-IDs from EPG
|
||||
</Button>
|
||||
</Group>
|
||||
<Group gap="xs" wrap="nowrap" mt="xs">
|
||||
<Button
|
||||
size="xs"
|
||||
variant="light"
|
||||
color="red"
|
||||
onClick={handleClearEpgs}
|
||||
style={{ flex: 1 }}
|
||||
>
|
||||
Clear EPG (Set to Dummy)
|
||||
</Button>
|
||||
</Group>
|
||||
<Text size="xs" c="dimmed" mt="xs">
|
||||
Updates channel names, logos, and TVG-IDs based on their
|
||||
assigned EPG data, or clear EPG assignments to use dummy EPG
|
||||
</Text>
|
||||
</Paper>
|
||||
|
||||
<Popover
|
||||
opened={groupPopoverOpened}
|
||||
onChange={setGroupPopoverOpened}
|
||||
|
|
@ -345,6 +622,163 @@ const ChannelBatchForm = ({ channelIds, isOpen, onClose }) => {
|
|||
</Popover.Dropdown>
|
||||
</Popover>
|
||||
|
||||
<Group style={{ width: '100%' }} align="flex-end" gap="xs">
|
||||
<Popover
|
||||
opened={logoPopoverOpened}
|
||||
onChange={setLogoPopoverOpened}
|
||||
withArrow
|
||||
>
|
||||
<Popover.Target>
|
||||
<TextInput
|
||||
label="Logo"
|
||||
readOnly
|
||||
{...form.getInputProps('logo')}
|
||||
key={form.key('logo')}
|
||||
onClick={() => setLogoPopoverOpened(true)}
|
||||
size="xs"
|
||||
style={{ flex: 1 }}
|
||||
rightSection={
|
||||
selectedLogoId !== '-1' && (
|
||||
<ActionIcon
|
||||
size="xs"
|
||||
variant="subtle"
|
||||
onClick={(e) => {
|
||||
e.stopPropagation();
|
||||
setSelectedLogoId('-1');
|
||||
form.setValues({ logo: '(no change)' });
|
||||
}}
|
||||
>
|
||||
<X size={12} />
|
||||
</ActionIcon>
|
||||
)
|
||||
}
|
||||
/>
|
||||
</Popover.Target>
|
||||
<Popover.Dropdown onMouseDown={(e) => e.stopPropagation()}>
|
||||
<Group>
|
||||
<TextInput
|
||||
placeholder="Filter"
|
||||
value={logoFilter}
|
||||
onChange={(event) =>
|
||||
setLogoFilter(event.currentTarget.value)
|
||||
}
|
||||
mb="xs"
|
||||
size="xs"
|
||||
/>
|
||||
{logosLoading && (
|
||||
<Text size="xs" c="dimmed">
|
||||
Loading...
|
||||
</Text>
|
||||
)}
|
||||
</Group>
|
||||
<ScrollArea style={{ height: 200 }}>
|
||||
{filteredLogos.length === 0 ? (
|
||||
<Center style={{ height: 200 }}>
|
||||
<Text size="sm" c="dimmed">
|
||||
{logoFilter
|
||||
? 'No logos match your filter'
|
||||
: 'No logos available'}
|
||||
</Text>
|
||||
</Center>
|
||||
) : (
|
||||
<List
|
||||
height={200}
|
||||
itemCount={filteredLogos.length}
|
||||
itemSize={55}
|
||||
style={{ width: '100%' }}
|
||||
ref={logoListRef}
|
||||
>
|
||||
{({ index, style }) => {
|
||||
const item = filteredLogos[index];
|
||||
return (
|
||||
<div
|
||||
style={{
|
||||
...style,
|
||||
cursor: 'pointer',
|
||||
padding: '5px',
|
||||
borderRadius: '4px',
|
||||
}}
|
||||
onClick={() => {
|
||||
setSelectedLogoId(item.id);
|
||||
form.setValues({
|
||||
logo: item.name,
|
||||
});
|
||||
setLogoPopoverOpened(false);
|
||||
}}
|
||||
onMouseEnter={(e) => {
|
||||
e.currentTarget.style.backgroundColor =
|
||||
'rgb(68, 68, 68)';
|
||||
}}
|
||||
onMouseLeave={(e) => {
|
||||
e.currentTarget.style.backgroundColor =
|
||||
'transparent';
|
||||
}}
|
||||
>
|
||||
<Center
|
||||
style={{
|
||||
flexDirection: 'column',
|
||||
gap: '2px',
|
||||
}}
|
||||
>
|
||||
{item.isDefault ? (
|
||||
<img
|
||||
src={logo}
|
||||
height="30"
|
||||
style={{
|
||||
maxWidth: 80,
|
||||
objectFit: 'contain',
|
||||
}}
|
||||
alt="Default Logo"
|
||||
/>
|
||||
) : item.id > 0 ? (
|
||||
<img
|
||||
src={item.cache_url || logo}
|
||||
height="30"
|
||||
style={{
|
||||
maxWidth: 80,
|
||||
objectFit: 'contain',
|
||||
}}
|
||||
alt={item.name || 'Logo'}
|
||||
onError={(e) => {
|
||||
if (e.target.src !== logo) {
|
||||
e.target.src = logo;
|
||||
}
|
||||
}}
|
||||
/>
|
||||
) : (
|
||||
<Box h={30} />
|
||||
)}
|
||||
<Text
|
||||
size="xs"
|
||||
c="dimmed"
|
||||
ta="center"
|
||||
style={{
|
||||
maxWidth: 80,
|
||||
overflow: 'hidden',
|
||||
textOverflow: 'ellipsis',
|
||||
whiteSpace: 'nowrap',
|
||||
}}
|
||||
>
|
||||
{item.name}
|
||||
</Text>
|
||||
</Center>
|
||||
</div>
|
||||
);
|
||||
}}
|
||||
</List>
|
||||
)}
|
||||
</ScrollArea>
|
||||
</Popover.Dropdown>
|
||||
</Popover>
|
||||
{selectedLogoId > 0 && (
|
||||
<LazyLogo
|
||||
logoId={selectedLogoId}
|
||||
alt="channel logo"
|
||||
style={{ height: 24, marginBottom: 5 }}
|
||||
/>
|
||||
)}
|
||||
</Group>
|
||||
|
||||
<Select
|
||||
id="stream_profile_id"
|
||||
label="Stream Profile"
|
||||
|
|
@ -396,6 +830,90 @@ const ChannelBatchForm = ({ channelIds, isOpen, onClose }) => {
|
|||
isOpen={channelGroupModelOpen}
|
||||
onClose={handleChannelGroupModalClose}
|
||||
/>
|
||||
|
||||
<ConfirmationDialog
|
||||
opened={confirmSetNamesOpen}
|
||||
onClose={() => setConfirmSetNamesOpen(false)}
|
||||
onConfirm={executeSetNamesFromEpg}
|
||||
title="Confirm Set Names from EPG"
|
||||
message={
|
||||
<div style={{ whiteSpace: 'pre-line' }}>
|
||||
{`Are you sure you want to set names from EPG for ${channelIds?.length || 0} selected channels?
|
||||
|
||||
This will replace the current channel names with the names from their assigned EPG data.
|
||||
|
||||
This action cannot be undone.`}
|
||||
</div>
|
||||
}
|
||||
confirmLabel="Set Names"
|
||||
cancelLabel="Cancel"
|
||||
actionKey="batch-set-names-from-epg"
|
||||
onSuppressChange={suppressWarning}
|
||||
size="md"
|
||||
/>
|
||||
|
||||
<ConfirmationDialog
|
||||
opened={confirmSetLogosOpen}
|
||||
onClose={() => setConfirmSetLogosOpen(false)}
|
||||
onConfirm={executeSetLogosFromEpg}
|
||||
title="Confirm Set Logos from EPG"
|
||||
message={
|
||||
<div style={{ whiteSpace: 'pre-line' }}>
|
||||
{`Are you sure you want to set logos from EPG for ${channelIds?.length || 0} selected channels?
|
||||
|
||||
This will replace the current channel logos with logos from their assigned EPG data. New logos will be created if needed.
|
||||
|
||||
This action cannot be undone.`}
|
||||
</div>
|
||||
}
|
||||
confirmLabel="Set Logos"
|
||||
cancelLabel="Cancel"
|
||||
actionKey="batch-set-logos-from-epg"
|
||||
onSuppressChange={suppressWarning}
|
||||
size="md"
|
||||
/>
|
||||
|
||||
<ConfirmationDialog
|
||||
opened={confirmSetTvgIdsOpen}
|
||||
onClose={() => setConfirmSetTvgIdsOpen(false)}
|
||||
onConfirm={executeSetTvgIdsFromEpg}
|
||||
title="Confirm Set TVG-IDs from EPG"
|
||||
message={
|
||||
<div style={{ whiteSpace: 'pre-line' }}>
|
||||
{`Are you sure you want to set TVG-IDs from EPG for ${channelIds?.length || 0} selected channels?
|
||||
|
||||
This will replace the current TVG-IDs with the TVG-IDs from their assigned EPG data.
|
||||
|
||||
This action cannot be undone.`}
|
||||
</div>
|
||||
}
|
||||
confirmLabel="Set TVG-IDs"
|
||||
cancelLabel="Cancel"
|
||||
actionKey="batch-set-tvg-ids-from-epg"
|
||||
onSuppressChange={suppressWarning}
|
||||
size="md"
|
||||
/>
|
||||
|
||||
<ConfirmationDialog
|
||||
opened={confirmClearEpgsOpen}
|
||||
onClose={() => setConfirmClearEpgsOpen(false)}
|
||||
onConfirm={executeClearEpgs}
|
||||
title="Confirm Clear EPG Assignments"
|
||||
message={
|
||||
<div style={{ whiteSpace: 'pre-line' }}>
|
||||
{`Are you sure you want to clear EPG assignments for ${channelIds?.length || 0} selected channels?
|
||||
|
||||
This will set all selected channels to use dummy EPG data.
|
||||
|
||||
This action cannot be undone.`}
|
||||
</div>
|
||||
}
|
||||
confirmLabel="Clear EPGs"
|
||||
cancelLabel="Cancel"
|
||||
actionKey="batch-clear-epgs"
|
||||
onSuppressChange={suppressWarning}
|
||||
size="md"
|
||||
/>
|
||||
</>
|
||||
);
|
||||
};
|
||||
|
|
@ -403,7 +921,7 @@ const ChannelBatchForm = ({ channelIds, isOpen, onClose }) => {
|
|||
export default ChannelBatchForm;
|
||||
|
||||
// Lightweight inline preview component to visualize rename results for a subset
|
||||
const RegexPreview = ({ channelIds, find, replace}) => {
|
||||
const RegexPreview = ({ channelIds, find, replace }) => {
|
||||
const channelsMap = useChannelsStore((s) => s.channels);
|
||||
const previewItems = useMemo(() => {
|
||||
const items = [];
|
||||
|
|
@ -412,7 +930,8 @@ const RegexPreview = ({ channelIds, find, replace}) => {
|
|||
let re;
|
||||
try {
|
||||
re = new RegExp(find, flags);
|
||||
} catch (e) {
|
||||
} catch (error) {
|
||||
console.error('Invalid regex:', error);
|
||||
return [{ before: 'Invalid regex', after: '' }];
|
||||
}
|
||||
for (let i = 0; i < Math.min(channelIds.length, 25); i++) {
|
||||
|
|
@ -431,20 +950,41 @@ const RegexPreview = ({ channelIds, find, replace}) => {
|
|||
return (
|
||||
<Box mt={8}>
|
||||
<Text size="xs" c="dimmed" mb={4}>
|
||||
Preview (first {Math.min(channelIds.length, 25)} of {channelIds.length} selected)
|
||||
Preview (first {Math.min(channelIds.length, 25)} of {channelIds.length}{' '}
|
||||
selected)
|
||||
</Text>
|
||||
<ScrollArea h={120} offsetScrollbars>
|
||||
<Stack gap={4}>
|
||||
{previewItems.length === 0 ? (
|
||||
<Text size="xs" c="dimmed">No changes with current pattern.</Text>
|
||||
<Text size="xs" c="dimmed">
|
||||
No changes with current pattern.
|
||||
</Text>
|
||||
) : (
|
||||
previewItems.map((row, idx) => (
|
||||
<Group key={idx} gap={8} wrap="nowrap" align="center">
|
||||
<Text size="xs" style={{ flex: 1, whiteSpace: 'nowrap', overflow: 'hidden', textOverflow: 'ellipsis' }}>
|
||||
<Text
|
||||
size="xs"
|
||||
style={{
|
||||
flex: 1,
|
||||
whiteSpace: 'nowrap',
|
||||
overflow: 'hidden',
|
||||
textOverflow: 'ellipsis',
|
||||
}}
|
||||
>
|
||||
{row.before}
|
||||
</Text>
|
||||
<Text size="xs" c="gray.6">→</Text>
|
||||
<Text size="xs" style={{ flex: 1, whiteSpace: 'nowrap', overflow: 'hidden', textOverflow: 'ellipsis' }}>
|
||||
<Text size="xs" c="gray.6">
|
||||
→
|
||||
</Text>
|
||||
<Text
|
||||
size="xs"
|
||||
style={{
|
||||
flex: 1,
|
||||
whiteSpace: 'nowrap',
|
||||
overflow: 'hidden',
|
||||
textOverflow: 'ellipsis',
|
||||
}}
|
||||
>
|
||||
{row.after}
|
||||
</Text>
|
||||
</Group>
|
||||
|
|
|
|||
|
|
@ -106,13 +106,12 @@ const LogoForm = ({ logo = null, isOpen, onClose, onSuccess }) => {
|
|||
onSuccess?.({ type: 'create', logo: newLogo }); // Call onSuccess for creates
|
||||
} else {
|
||||
// File was uploaded and logo was already created
|
||||
// Note: API.uploadLogo already calls addLogo() in the store, so no need to call onSuccess
|
||||
notifications.show({
|
||||
title: 'Success',
|
||||
message: 'Logo uploaded successfully',
|
||||
color: 'green',
|
||||
});
|
||||
// No onSuccess call needed - API.uploadLogo already updated the store
|
||||
onSuccess?.({ type: 'create', logo: uploadResponse });
|
||||
}
|
||||
onClose();
|
||||
} catch (error) {
|
||||
|
|
@ -211,6 +210,24 @@ const LogoForm = ({ logo = null, isOpen, onClose, onSuccess }) => {
|
|||
}
|
||||
};
|
||||
|
||||
const handleUrlBlur = (event) => {
|
||||
const urlValue = event.target.value;
|
||||
if (urlValue) {
|
||||
try {
|
||||
const url = new URL(urlValue);
|
||||
const pathname = url.pathname;
|
||||
const filename = pathname.substring(pathname.lastIndexOf('/') + 1);
|
||||
const nameWithoutExtension = filename.replace(/\.[^/.]+$/, '');
|
||||
if (nameWithoutExtension) {
|
||||
formik.setFieldValue('name', nameWithoutExtension);
|
||||
}
|
||||
} catch (error) {
|
||||
// If the URL is invalid, do nothing.
|
||||
// The validation schema will catch this.
|
||||
}
|
||||
}
|
||||
};
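For reference, a minimal sketch of the derivation the blur handler above performs, using the placeholder URL shown in the field further below; the intermediate values are illustrative only.
// Minimal sketch of the derivation above (illustrative, not part of this change):
const parsed = new URL('https://example.com/logo.png');
const filename = parsed.pathname.substring(parsed.pathname.lastIndexOf('/') + 1); // "logo.png"
const nameWithoutExtension = filename.replace(/\.[^/.]+$/, ''); // "logo"
// formik.setFieldValue('name', nameWithoutExtension) would then set the name field to "logo".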
|
||||
|
||||
// Clean up object URLs when component unmounts or preview changes
|
||||
useEffect(() => {
|
||||
return () => {
|
||||
|
|
@ -323,6 +340,7 @@ const LogoForm = ({ logo = null, isOpen, onClose, onSuccess }) => {
|
|||
placeholder="https://example.com/logo.png"
|
||||
{...formik.getFieldProps('url')}
|
||||
onChange={handleUrlChange}
|
||||
onBlur={handleUrlBlur}
|
||||
error={formik.touched.url && formik.errors.url}
|
||||
disabled={!!selectedFile} // Disable when file is selected
|
||||
/>
|
||||
|
|
|
|||
|
|
@ -23,7 +23,6 @@ import {
|
|||
} from '@mantine/core';
|
||||
import M3UGroupFilter from './M3UGroupFilter';
|
||||
import useChannelsStore from '../../store/channels';
|
||||
import usePlaylistsStore from '../../store/playlists';
|
||||
import { notifications } from '@mantine/notifications';
|
||||
import { isNotEmpty, useForm } from '@mantine/form';
|
||||
import useEPGsStore from '../../store/epgs';
|
||||
|
|
@ -40,7 +39,6 @@ const M3U = ({
|
|||
|
||||
const userAgents = useUserAgentsStore((s) => s.userAgents);
|
||||
const fetchChannelGroups = useChannelsStore((s) => s.fetchChannelGroups);
|
||||
const fetchPlaylists = usePlaylistsStore((s) => s.fetchPlaylists);
|
||||
const fetchEPGs = useEPGsStore((s) => s.fetchEPGs);
|
||||
const fetchCategories = useVODStore((s) => s.fetchCategories);
|
||||
|
||||
|
|
@ -61,7 +59,7 @@ const M3U = ({
|
|||
is_active: true,
|
||||
max_streams: 0,
|
||||
refresh_interval: 24,
|
||||
account_type: 'STD',
|
||||
account_type: 'XC',
|
||||
create_epg: false,
|
||||
username: '',
|
||||
password: '',
|
||||
|
|
@ -171,8 +169,14 @@ const M3U = ({
|
|||
return;
|
||||
}
|
||||
|
||||
// Fetch the updated playlist details (this also updates the store via API)
|
||||
const updatedPlaylist = await API.getPlaylist(newPlaylist.id);
|
||||
await Promise.all([fetchChannelGroups(), fetchPlaylists(), fetchEPGs()]);
|
||||
|
||||
// Note: We don't call fetchPlaylists() here because API.addPlaylist()
|
||||
// already added the playlist to the store. Calling fetchPlaylists() creates
|
||||
// a race condition where the store is temporarily cleared/replaced while
|
||||
// websocket updates for the new playlist's refresh task are arriving.
|
||||
await Promise.all([fetchChannelGroups(), fetchEPGs()]);
|
||||
|
||||
// If this is an XC account with VOD enabled, also fetch VOD categories
|
||||
if (values.account_type === 'XC' && values.enable_vod) {
|
||||
|
|
@ -199,6 +203,11 @@ const M3U = ({
|
|||
|
||||
const closeGroupFilter = () => {
|
||||
setGroupFilterModalOpen(false);
|
||||
// After group filter setup for a new account, reset everything
|
||||
form.reset();
|
||||
setFile(null);
|
||||
setPlaylist(null);
|
||||
onClose();
|
||||
};
|
||||
|
||||
const closeFilter = () => {
|
||||
|
|
|
|||
|
|
@ -51,8 +51,7 @@ const M3UFilter = ({ filter = null, m3u, isOpen, onClose }) => {
|
|||
values.custom_properties = setCustomProperty(
|
||||
filter ? filter.custom_properties : {},
|
||||
'case_sensitive',
|
||||
values.case_sensitive,
|
||||
true
|
||||
values.case_sensitive
|
||||
);
|
||||
|
||||
delete values.case_sensitive;
|
||||
|
|
|
|||
|
|
@ -1,117 +1,424 @@
|
|||
// Modal.js
|
||||
import React from 'react';
|
||||
import React, { useEffect, useMemo, useState } from 'react';
|
||||
import dayjs from 'dayjs';
|
||||
import API from '../../api';
|
||||
import { Button, Modal, Flex, Select, Alert } from '@mantine/core';
|
||||
import useChannelsStore from '../../store/channels';
|
||||
import { DateTimePicker } from '@mantine/dates';
|
||||
import {
|
||||
Alert,
|
||||
Button,
|
||||
Modal,
|
||||
Select,
|
||||
Stack,
|
||||
SegmentedControl,
|
||||
MultiSelect,
|
||||
Group,
|
||||
TextInput,
|
||||
} from '@mantine/core';
|
||||
import { DateTimePicker, TimeInput, DatePickerInput } from '@mantine/dates';
|
||||
import { CircleAlert } from 'lucide-react';
|
||||
import { isNotEmpty, useForm } from '@mantine/form';
|
||||
import useChannelsStore from '../../store/channels';
|
||||
import { notifications } from '@mantine/notifications';
|
||||
|
||||
const DVR = ({ recording = null, channel = null, isOpen, onClose }) => {
|
||||
const DAY_OPTIONS = [
|
||||
{ value: '6', label: 'Sun' },
|
||||
{ value: '0', label: 'Mon' },
|
||||
{ value: '1', label: 'Tue' },
|
||||
{ value: '2', label: 'Wed' },
|
||||
{ value: '3', label: 'Thu' },
|
||||
{ value: '4', label: 'Fri' },
|
||||
{ value: '5', label: 'Sat' },
|
||||
];
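The day values here appear to be Monday-indexed (Mon = 0 … Sun = 6) rather than JavaScript's Sunday-indexed Date.getDay(); a minimal conversion sketch under that assumption:
// Hypothetical helper (assumption, not part of this change): map Date.getDay()
// (Sun=0..Sat=6) onto the Monday-indexed values used by DAY_OPTIONS above.
const toRuleDayValue = (date) => String((date.getDay() + 6) % 7);
// Example: new Date(2025, 0, 5) is a Sunday -> getDay() is 0 -> '6', matching { value: '6', label: 'Sun' }.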
|
||||
|
||||
const asDate = (value) => {
|
||||
if (!value) return null;
|
||||
if (value instanceof Date) return value;
|
||||
const parsed = new Date(value);
|
||||
return Number.isNaN(parsed.getTime()) ? null : parsed;
|
||||
};
|
||||
|
||||
const toIsoIfDate = (value) => {
|
||||
const dt = asDate(value);
|
||||
return dt ? dt.toISOString() : value;
|
||||
};
|
||||
|
||||
// Accepts "h:mm A"/"hh:mm A"/"HH:mm"/Date, returns "HH:mm"
|
||||
const toTimeString = (value) => {
|
||||
if (!value) return '00:00';
|
||||
if (typeof value === 'string') {
|
||||
const parsed = dayjs(value, ['HH:mm', 'hh:mm A', 'h:mm A', 'HH:mm:ss'], true);
|
||||
if (parsed.isValid()) return parsed.format('HH:mm');
|
||||
return value;
|
||||
}
|
||||
const dt = asDate(value);
|
||||
if (!dt) return '00:00';
|
||||
return dayjs(dt).format('HH:mm');
|
||||
};
|
||||
|
||||
const toDateString = (value) => {
|
||||
const dt = asDate(value);
|
||||
if (!dt) return null;
|
||||
const year = dt.getFullYear();
|
||||
const month = String(dt.getMonth() + 1).padStart(2, '0');
|
||||
const day = String(dt.getDate()).padStart(2, '0');
|
||||
return `${year}-${month}-${day}`;
|
||||
};
|
||||
|
||||
const createRoundedDate = (minutesAhead = 0) => {
|
||||
const dt = new Date();
|
||||
dt.setSeconds(0);
|
||||
dt.setMilliseconds(0);
|
||||
dt.setMinutes(Math.ceil(dt.getMinutes() / 30) * 30);
|
||||
if (minutesAhead) dt.setMinutes(dt.getMinutes() + minutesAhead);
|
||||
return dt;
|
||||
};
|
||||
|
||||
// robust onChange for TimeInput (string or event)
|
||||
const timeChange = (setter) => (valOrEvent) => {
|
||||
if (typeof valOrEvent === 'string') setter(valOrEvent);
|
||||
else if (valOrEvent?.currentTarget) setter(valOrEvent.currentTarget.value);
|
||||
};
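A minimal usage sketch of the helpers above; the printed values are illustrative, and the string parsing in toTimeString assumes dayjs strict format parsing (the customParseFormat plugin) is available.
// Illustrative usage of the helpers above (not part of this change):
const exampleStart = createRoundedDate();   // now, rounded up to the next 30-minute mark
const exampleEnd = createRoundedDate(60);   // one hour after that
toTimeString(exampleStart);                 // e.g. "14:30"
toTimeString('2:05 PM');                    // "14:05" (assumes dayjs customParseFormat is loaded)
toDateString(exampleEnd);                   // e.g. "2025-01-05"
toIsoIfDate(exampleStart);                  // ISO string, e.g. "2025-01-05T14:30:00.000Z"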
|
||||
|
||||
const RecordingModal = ({ recording = null, channel = null, isOpen, onClose }) => {
|
||||
const channels = useChannelsStore((s) => s.channels);
|
||||
const fetchRecordings = useChannelsStore((s) => s.fetchRecordings);
|
||||
const fetchRecurringRules = useChannelsStore((s) => s.fetchRecurringRules);
|
||||
|
||||
let startTime = new Date();
|
||||
startTime.setMinutes(Math.ceil(startTime.getMinutes() / 30) * 30);
|
||||
startTime.setSeconds(0);
|
||||
startTime.setMilliseconds(0);
|
||||
const [mode, setMode] = useState('single');
|
||||
const [submitting, setSubmitting] = useState(false);
|
||||
|
||||
let endTime = new Date();
|
||||
endTime.setMinutes(Math.ceil(endTime.getMinutes() / 30) * 30);
|
||||
endTime.setSeconds(0);
|
||||
endTime.setMilliseconds(0);
|
||||
endTime.setHours(endTime.getHours() + 1);
|
||||
const defaultStart = createRoundedDate();
|
||||
const defaultEnd = createRoundedDate(60);
|
||||
const defaultDate = new Date();
|
||||
|
||||
const form = useForm({
|
||||
mode: 'uncontrolled',
|
||||
// One-time form
|
||||
const singleForm = useForm({
|
||||
mode: 'controlled',
|
||||
initialValues: {
|
||||
channel_id: recording
|
||||
? recording.channel_id
|
||||
: channel
|
||||
? `${channel.id}`
|
||||
: '',
|
||||
start_time: recording ? recording.start_time : startTime,
|
||||
end_time: recording ? recording.end_time : endTime,
|
||||
channel_id: recording ? `${recording.channel}` : channel ? `${channel.id}` : '',
|
||||
start_time: recording ? asDate(recording.start_time) || defaultStart : defaultStart,
|
||||
end_time: recording ? asDate(recording.end_time) || defaultEnd : defaultEnd,
|
||||
},
|
||||
|
||||
validate: {
|
||||
channel_id: isNotEmpty('Select a channel'),
|
||||
start_time: isNotEmpty('Select a start time'),
|
||||
end_time: isNotEmpty('Select an end time'),
|
||||
end_time: (value, values) => {
|
||||
const start = asDate(values.start_time);
|
||||
const end = asDate(value);
|
||||
if (!end) return 'Select an end time';
|
||||
if (start && end <= start) return 'End time must be after start time';
|
||||
return null;
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
const onSubmit = async () => {
|
||||
const { channel_id, ...values } = form.getValues();
|
||||
// Recurring form stores times as "HH:mm" strings for stable editing
|
||||
const recurringForm = useForm({
|
||||
mode: 'controlled',
|
||||
validateInputOnChange: false,
|
||||
validateInputOnBlur: true,
|
||||
initialValues: {
|
||||
channel_id: channel ? `${channel.id}` : '',
|
||||
days_of_week: [],
|
||||
start_time: dayjs(defaultStart).format('HH:mm'),
|
||||
end_time: dayjs(defaultEnd).format('HH:mm'),
|
||||
rule_name: '',
|
||||
start_date: defaultDate,
|
||||
end_date: defaultDate,
|
||||
},
|
||||
validate: {
|
||||
channel_id: isNotEmpty('Select a channel'),
|
||||
days_of_week: (value) => (value && value.length ? null : 'Pick at least one day'),
|
||||
start_time: (value) => (value ? null : 'Select a start time'),
|
||||
end_time: (value, values) => {
|
||||
if (!value) return 'Select an end time';
|
||||
const start = dayjs(values.start_time, ['HH:mm', 'hh:mm A', 'h:mm A'], true);
|
||||
const end = dayjs(value, ['HH:mm', 'hh:mm A', 'h:mm A'], true);
|
||||
if (start.isValid() && end.isValid() && end.diff(start, 'minute') === 0) {
|
||||
return 'End time must differ from start time';
|
||||
}
|
||||
return null;
|
||||
},
|
||||
end_date: (value, values) => {
|
||||
const end = asDate(value);
|
||||
const start = asDate(values.start_date);
|
||||
if (!end) return 'Select an end date';
|
||||
if (start && end < start) return 'End date cannot be before start date';
|
||||
return null;
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
console.log(values);
|
||||
useEffect(() => {
|
||||
if (!isOpen) return;
|
||||
|
||||
await API.createRecording({
|
||||
...values,
|
||||
channel: channel_id,
|
||||
const freshStart = createRoundedDate();
|
||||
const freshEnd = createRoundedDate(60);
|
||||
const freshDate = new Date();
|
||||
|
||||
if (recording && recording.id) {
|
||||
setMode('single');
|
||||
singleForm.setValues({
|
||||
channel_id: `${recording.channel}`,
|
||||
start_time: asDate(recording.start_time) || defaultStart,
|
||||
end_time: asDate(recording.end_time) || defaultEnd,
|
||||
});
|
||||
} else {
|
||||
// Reset forms for fresh open
|
||||
singleForm.setValues({
|
||||
channel_id: channel ? `${channel.id}` : '',
|
||||
start_time: freshStart,
|
||||
end_time: freshEnd,
|
||||
});
|
||||
|
||||
const startStr = dayjs(freshStart).format('HH:mm');
|
||||
recurringForm.setValues({
|
||||
channel_id: channel ? `${channel.id}` : '',
|
||||
days_of_week: [],
|
||||
start_time: startStr,
|
||||
end_time: dayjs(freshEnd).format('HH:mm'),
|
||||
rule_name: channel?.name || '',
|
||||
start_date: freshDate,
|
||||
end_date: freshDate,
|
||||
});
|
||||
setMode('single');
|
||||
}
|
||||
// eslint-disable-next-line react-hooks/exhaustive-deps
|
||||
}, [isOpen, recording, channel]);
|
||||
|
||||
const channelOptions = useMemo(() => {
|
||||
const list = Object.values(channels || {});
|
||||
list.sort((a, b) => {
|
||||
const aNum = Number(a.channel_number) || 0;
|
||||
const bNum = Number(b.channel_number) || 0;
|
||||
if (aNum === bNum) return (a.name || '').localeCompare(b.name || '');
|
||||
return aNum - bNum;
|
||||
});
|
||||
return list.map((item) => ({ value: `${item.id}`, label: item.name || `Channel ${item.id}` }));
|
||||
}, [channels]);
|
||||
|
||||
form.reset();
|
||||
onClose();
|
||||
const resetForms = () => {
|
||||
singleForm.reset();
|
||||
recurringForm.reset();
|
||||
setMode('single');
|
||||
};
|
||||
|
||||
if (!isOpen) {
|
||||
return <></>;
|
||||
}
|
||||
const handleClose = () => {
|
||||
resetForms();
|
||||
onClose?.();
|
||||
};
|
||||
|
||||
const handleSingleSubmit = async (values) => {
|
||||
try {
|
||||
setSubmitting(true);
|
||||
if (recording && recording.id) {
|
||||
await API.updateRecording(recording.id, {
|
||||
channel: values.channel_id,
|
||||
start_time: toIsoIfDate(values.start_time),
|
||||
end_time: toIsoIfDate(values.end_time),
|
||||
});
|
||||
notifications.show({
|
||||
title: 'Recording updated',
|
||||
message: 'Recording schedule updated successfully',
|
||||
color: 'green',
|
||||
autoClose: 2500,
|
||||
});
|
||||
} else {
|
||||
await API.createRecording({
|
||||
channel: values.channel_id,
|
||||
start_time: toIsoIfDate(values.start_time),
|
||||
end_time: toIsoIfDate(values.end_time),
|
||||
});
|
||||
notifications.show({
|
||||
title: 'Recording scheduled',
|
||||
message: 'One-time recording added to DVR queue',
|
||||
color: 'green',
|
||||
autoClose: 2500,
|
||||
});
|
||||
}
|
||||
await fetchRecordings();
|
||||
handleClose();
|
||||
} catch (error) {
|
||||
console.error('Failed to create recording', error);
|
||||
} finally {
|
||||
setSubmitting(false);
|
||||
}
|
||||
};
|
||||
|
||||
const handleRecurringSubmit = async (values) => {
|
||||
try {
|
||||
setSubmitting(true);
|
||||
await API.createRecurringRule({
|
||||
channel: values.channel_id,
|
||||
days_of_week: (values.days_of_week || []).map((d) => Number(d)),
|
||||
start_time: toTimeString(values.start_time),
|
||||
end_time: toTimeString(values.end_time),
|
||||
start_date: toDateString(values.start_date),
|
||||
end_date: toDateString(values.end_date),
|
||||
name: values.rule_name?.trim() || '',
|
||||
});
|
||||
|
||||
await Promise.all([fetchRecurringRules(), fetchRecordings()]);
|
||||
notifications.show({
|
||||
title: 'Recurring rule saved',
|
||||
message: 'Future slots will be scheduled automatically',
|
||||
color: 'green',
|
||||
autoClose: 2500,
|
||||
});
|
||||
handleClose();
|
||||
} catch (error) {
|
||||
console.error('Failed to create recurring rule', error);
|
||||
} finally {
|
||||
setSubmitting(false);
|
||||
}
|
||||
};
|
||||
|
||||
const onSubmit =
|
||||
mode === 'single'
|
||||
? singleForm.onSubmit(handleSingleSubmit)
|
||||
: recurringForm.onSubmit(handleRecurringSubmit);
|
||||
|
||||
if (!isOpen) return null;
|
||||
|
||||
return (
|
||||
<Modal opened={isOpen} onClose={onClose} title="Channel Recording">
|
||||
<Modal opened={isOpen} onClose={handleClose} title="Channel Recording">
|
||||
<Alert
|
||||
variant="light"
|
||||
color="yellow"
|
||||
title="Scheduling Conflicts"
|
||||
icon={<CircleAlert />}
|
||||
style={{ paddingBottom: 5 }}
|
||||
style={{ paddingBottom: 5, marginBottom: 12 }}
|
||||
>
|
||||
Recordings may fail if active streams or overlapping recordings use up
|
||||
all available streams
|
||||
Recordings may fail if active streams or overlapping recordings use up all available tuners.
|
||||
</Alert>
|
||||
|
||||
<form onSubmit={form.onSubmit(onSubmit)}>
|
||||
<Select
|
||||
{...form.getInputProps('channel_id')}
|
||||
label="Channel"
|
||||
key={form.key('channel_id')}
|
||||
searchable
|
||||
data={Object.values(channels).map((channel) => ({
|
||||
value: `${channel.id}`,
|
||||
label: channel.name,
|
||||
}))}
|
||||
<Stack gap="md">
|
||||
<SegmentedControl
|
||||
value={mode}
|
||||
onChange={setMode}
|
||||
disabled={Boolean(recording && recording.id)}
|
||||
data={[
|
||||
{ value: 'single', label: 'One-time' },
|
||||
{ value: 'recurring', label: 'Recurring' },
|
||||
]}
|
||||
/>
|
||||
|
||||
<DateTimePicker
|
||||
{...form.getInputProps('start_time')}
|
||||
key={form.key('start_time')}
|
||||
id="start_time"
|
||||
label="Start Time"
|
||||
valueFormat="M/DD/YYYY hh:mm A"
|
||||
/>
|
||||
<form onSubmit={onSubmit}>
|
||||
<Stack gap="md">
|
||||
{mode === 'single' ? (
|
||||
<Select
|
||||
{...singleForm.getInputProps('channel_id')}
|
||||
key={singleForm.key('channel_id')}
|
||||
label="Channel"
|
||||
placeholder="Select channel"
|
||||
searchable
|
||||
data={channelOptions}
|
||||
/>
|
||||
) : (
|
||||
<Select
|
||||
{...recurringForm.getInputProps('channel_id')}
|
||||
key={recurringForm.key('channel_id')}
|
||||
label="Channel"
|
||||
placeholder="Select channel"
|
||||
searchable
|
||||
data={channelOptions}
|
||||
/>
|
||||
)}
|
||||
|
||||
<DateTimePicker
|
||||
{...form.getInputProps('end_time')}
|
||||
key={form.key('end_time')}
|
||||
id="end_time"
|
||||
label="End Time"
|
||||
valueFormat="M/DD/YYYY hh:mm A"
|
||||
/>
|
||||
{mode === 'single' ? (
|
||||
<>
|
||||
<DateTimePicker
|
||||
{...singleForm.getInputProps('start_time')}
|
||||
key={singleForm.key('start_time')}
|
||||
label="Start"
|
||||
valueFormat="MMM D, YYYY h:mm A"
|
||||
timeInputProps={{ format: '12', withSeconds: false, amLabel: 'AM', pmLabel: 'PM' }}
|
||||
/>
|
||||
<DateTimePicker
|
||||
{...singleForm.getInputProps('end_time')}
|
||||
key={singleForm.key('end_time')}
|
||||
label="End"
|
||||
valueFormat="MMM D, YYYY h:mm A"
|
||||
timeInputProps={{ format: '12', withSeconds: false, amLabel: 'AM', pmLabel: 'PM' }}
|
||||
/>
|
||||
</>
|
||||
) : (
|
||||
<>
|
||||
<TextInput
|
||||
{...recurringForm.getInputProps('rule_name')}
|
||||
key={recurringForm.key('rule_name')}
|
||||
label="Rule name"
|
||||
placeholder="Morning News, Football Sundays, ..."
|
||||
/>
|
||||
<MultiSelect
|
||||
{...recurringForm.getInputProps('days_of_week')}
|
||||
key={recurringForm.key('days_of_week')}
|
||||
label="Every"
|
||||
placeholder="Select days"
|
||||
data={DAY_OPTIONS}
|
||||
searchable
|
||||
clearable
|
||||
nothingFoundMessage="No match"
|
||||
/>
|
||||
|
||||
<Flex mih={50} gap="xs" justify="flex-end" align="flex-end">
|
||||
<Button
|
||||
type="submit"
|
||||
variant="contained"
|
||||
size="small"
|
||||
disabled={form.submitting}
|
||||
>
|
||||
Submit
|
||||
</Button>
|
||||
</Flex>
|
||||
</form>
|
||||
<Group grow>
|
||||
<DatePickerInput
|
||||
label="Start date"
|
||||
value={recurringForm.values.start_date}
|
||||
onChange={(value) =>
|
||||
recurringForm.setFieldValue('start_date', value || new Date())
|
||||
}
|
||||
valueFormat="MMM D, YYYY"
|
||||
/>
|
||||
<DatePickerInput
|
||||
label="End date"
|
||||
value={recurringForm.values.end_date}
|
||||
onChange={(value) => recurringForm.setFieldValue('end_date', value)}
|
||||
valueFormat="MMM D, YYYY"
|
||||
minDate={recurringForm.values.start_date || undefined}
|
||||
/>
|
||||
</Group>
|
||||
|
||||
<Group grow>
|
||||
<TimeInput
|
||||
label="Start time"
|
||||
value={recurringForm.values.start_time}
|
||||
onChange={timeChange((val) =>
|
||||
recurringForm.setFieldValue('start_time', toTimeString(val))
|
||||
)}
|
||||
onBlur={() => recurringForm.validateField('start_time')}
|
||||
withSeconds={false}
|
||||
format="12" // shows 12-hour (so "00:00" renders "12:00 AM")
|
||||
inputMode="numeric"
|
||||
amLabel="AM"
|
||||
pmLabel="PM"
|
||||
/>
|
||||
|
||||
<TimeInput
|
||||
label="End time"
|
||||
value={recurringForm.values.end_time}
|
||||
onChange={timeChange((val) =>
|
||||
recurringForm.setFieldValue('end_time', toTimeString(val))
|
||||
)}
|
||||
onBlur={() => recurringForm.validateField('end_time')}
|
||||
withSeconds={false}
|
||||
format="12"
|
||||
inputMode="numeric"
|
||||
amLabel="AM"
|
||||
pmLabel="PM"
|
||||
/>
|
||||
</Group>
|
||||
</>
|
||||
)}
|
||||
|
||||
<Group justify="flex-end">
|
||||
<Button type="submit" loading={submitting}>
|
||||
{mode === 'single' ? 'Schedule Recording' : 'Save Rule'}
|
||||
</Button>
|
||||
</Group>
|
||||
</Stack>
|
||||
</form>
|
||||
</Stack>
|
||||
</Modal>
|
||||
);
|
||||
};
|
||||
|
||||
export default DVR;
|
||||
export default RecordingModal;
|
||||
|
|
|
|||
|
|
@ -143,11 +143,18 @@ const ChannelTableHeader = ({
|
|||
const matchEpg = async () => {
|
||||
try {
|
||||
// Hit our new endpoint that triggers the fuzzy matching Celery task
|
||||
await API.matchEpg();
|
||||
|
||||
notifications.show({
|
||||
title: 'EPG matching task started!',
|
||||
});
|
||||
// If channels are selected, only match those; otherwise match all
|
||||
if (selectedTableIds.length > 0) {
|
||||
await API.matchEpg(selectedTableIds);
|
||||
notifications.show({
|
||||
title: `EPG matching task started for ${selectedTableIds.length} selected channel(s)!`,
|
||||
});
|
||||
} else {
|
||||
await API.matchEpg();
|
||||
notifications.show({
|
||||
title: 'EPG matching task started for all channels without EPG!',
|
||||
});
|
||||
}
|
||||
} catch (err) {
|
||||
notifications.show(`Error: ${err.message}`);
|
||||
}
|
||||
|
|
@ -298,7 +305,11 @@ const ChannelTableHeader = ({
|
|||
disabled={authUser.user_level != USER_LEVELS.ADMIN}
|
||||
onClick={matchEpg}
|
||||
>
|
||||
<Text size="xs">Auto-Match</Text>
|
||||
<Text size="xs">
|
||||
{selectedTableIds.length > 0
|
||||
? `Auto-Match (${selectedTableIds.length} selected)`
|
||||
: 'Auto-Match EPG'}
|
||||
</Text>
|
||||
</Menu.Item>
|
||||
|
||||
<Menu.Item
|
||||
|
|
|
|||
|
|
@ -355,9 +355,7 @@ const M3UTable = () => {
  };

  const editPlaylist = async (playlist = null) => {
    if (playlist) {
      setPlaylist(playlist);
    }
    setPlaylist(playlist);
    setPlaylistModalOpen(true);
  };
|
||||
|
||||
|
|
|
|||
|
|
@ -282,7 +282,7 @@ const StreamsTable = () => {
|
|||
cell: ({ getValue }) => (
|
||||
<Box
|
||||
style={{
|
||||
whiteSpace: 'nowrap',
|
||||
whiteSpace: 'pre',
|
||||
overflow: 'hidden',
|
||||
textOverflow: 'ellipsis',
|
||||
}}
|
||||
|
|
@ -301,7 +301,7 @@ const StreamsTable = () => {
|
|||
cell: ({ getValue }) => (
|
||||
<Box
|
||||
style={{
|
||||
whiteSpace: 'nowrap',
|
||||
whiteSpace: 'pre',
|
||||
overflow: 'hidden',
|
||||
textOverflow: 'ellipsis',
|
||||
}}
|
||||
|
|
|
|||
File diff suppressed because it is too large
|
|
@ -1,4 +1,10 @@
|
|||
import React, { useEffect, useState } from 'react';
|
||||
import React, {
|
||||
useCallback,
|
||||
useEffect,
|
||||
useMemo,
|
||||
useRef,
|
||||
useState,
|
||||
} from 'react';
|
||||
import API from '../api';
|
||||
import useSettingsStore from '../store/settings';
|
||||
import useUserAgentsStore from '../store/userAgents';
|
||||
|
|
@ -14,6 +20,7 @@ import {
|
|||
Flex,
|
||||
Group,
|
||||
Loader,
|
||||
FileInput,
|
||||
MultiSelect,
|
||||
Select,
|
||||
Stack,
|
||||
|
|
@ -24,6 +31,7 @@ import {
|
|||
Tooltip,
|
||||
} from '@mantine/core';
|
||||
import { isNotEmpty, useForm } from '@mantine/form';
|
||||
import { notifications } from '@mantine/notifications';
|
||||
import UserAgentsTable from '../components/tables/UserAgentsTable';
|
||||
import StreamProfilesTable from '../components/tables/StreamProfilesTable';
|
||||
import useLocalStorage from '../hooks/useLocalStorage';
|
||||
|
|
@ -42,6 +50,140 @@ import useLibraryStore from '../store/library';
|
|||
import LibraryFormModal from '../components/library/LibraryFormModal';
|
||||
import { Pencil, Plus, RefreshCcw, Trash2 } from 'lucide-react';
|
||||
|
||||
const TIMEZONE_FALLBACKS = [
|
||||
'UTC',
|
||||
'America/New_York',
|
||||
'America/Chicago',
|
||||
'America/Denver',
|
||||
'America/Los_Angeles',
|
||||
'America/Phoenix',
|
||||
'America/Anchorage',
|
||||
'Pacific/Honolulu',
|
||||
'Europe/London',
|
||||
'Europe/Paris',
|
||||
'Europe/Berlin',
|
||||
'Europe/Madrid',
|
||||
'Europe/Warsaw',
|
||||
'Europe/Moscow',
|
||||
'Asia/Dubai',
|
||||
'Asia/Kolkata',
|
||||
'Asia/Shanghai',
|
||||
'Asia/Tokyo',
|
||||
'Asia/Seoul',
|
||||
'Australia/Sydney',
|
||||
];
|
||||
|
||||
const getSupportedTimeZones = () => {
|
||||
try {
|
||||
if (typeof Intl.supportedValuesOf === 'function') {
|
||||
return Intl.supportedValuesOf('timeZone');
|
||||
}
|
||||
} catch (error) {
|
||||
console.warn('Unable to enumerate supported time zones:', error);
|
||||
}
|
||||
return TIMEZONE_FALLBACKS;
|
||||
};
|
||||
|
||||
const getTimeZoneOffsetMinutes = (date, timeZone) => {
|
||||
try {
|
||||
const dtf = new Intl.DateTimeFormat('en-US', {
|
||||
timeZone,
|
||||
year: 'numeric',
|
||||
month: '2-digit',
|
||||
day: '2-digit',
|
||||
hour: '2-digit',
|
||||
minute: '2-digit',
|
||||
second: '2-digit',
|
||||
hourCycle: 'h23',
|
||||
});
|
||||
const parts = dtf.formatToParts(date).reduce((acc, part) => {
|
||||
if (part.type !== 'literal') acc[part.type] = part.value;
|
||||
return acc;
|
||||
}, {});
|
||||
const asUTC = Date.UTC(
|
||||
Number(parts.year),
|
||||
Number(parts.month) - 1,
|
||||
Number(parts.day),
|
||||
Number(parts.hour),
|
||||
Number(parts.minute),
|
||||
Number(parts.second)
|
||||
);
|
||||
return (asUTC - date.getTime()) / 60000;
|
||||
} catch (error) {
|
||||
console.warn(`Failed to compute offset for ${timeZone}:`, error);
|
||||
return 0;
|
||||
}
|
||||
};
|
||||
|
||||
const formatOffset = (minutes) => {
|
||||
const rounded = Math.round(minutes);
|
||||
const sign = rounded < 0 ? '-' : '+';
|
||||
const absolute = Math.abs(rounded);
|
||||
const hours = String(Math.floor(absolute / 60)).padStart(2, '0');
|
||||
const mins = String(absolute % 60).padStart(2, '0');
|
||||
return `UTC${sign}${hours}:${mins}`;
|
||||
};
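A small worked example of the two helpers above; the New York offsets are the usual standard/daylight values and are shown only for illustration.
// Illustrative only (not part of this change):
formatOffset(-300); // "UTC-05:00"
formatOffset(330);  // "UTC+05:30"
// getTimeZoneOffsetMinutes compares the wall-clock time in the target zone against UTC
// for the same instant; America/New_York in January typically yields -300 (UTC-05:00),
// and -240 (UTC-04:00) while DST is in effect.
getTimeZoneOffsetMinutes(new Date(Date.UTC(2025, 0, 1, 12, 0, 0)), 'America/New_York');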
|
||||
|
||||
const buildTimeZoneOptions = (preferredZone) => {
|
||||
const zones = getSupportedTimeZones();
|
||||
const referenceYear = new Date().getUTCFullYear();
|
||||
const janDate = new Date(Date.UTC(referenceYear, 0, 1, 12, 0, 0));
|
||||
const julDate = new Date(Date.UTC(referenceYear, 6, 1, 12, 0, 0));
|
||||
|
||||
const options = zones
|
||||
.map((zone) => {
|
||||
const janOffset = getTimeZoneOffsetMinutes(janDate, zone);
|
||||
const julOffset = getTimeZoneOffsetMinutes(julDate, zone);
|
||||
const currentOffset = getTimeZoneOffsetMinutes(new Date(), zone);
|
||||
const minOffset = Math.min(janOffset, julOffset);
|
||||
const maxOffset = Math.max(janOffset, julOffset);
|
||||
const usesDst = minOffset !== maxOffset;
|
||||
const labelParts = [`now ${formatOffset(currentOffset)}`];
|
||||
if (usesDst) {
|
||||
labelParts.push(
|
||||
`DST range ${formatOffset(minOffset)} to ${formatOffset(maxOffset)}`
|
||||
);
|
||||
}
|
||||
return {
|
||||
value: zone,
|
||||
label: `${zone} (${labelParts.join(' | ')})`,
|
||||
numericOffset: minOffset,
|
||||
};
|
||||
})
|
||||
.sort((a, b) => {
|
||||
if (a.numericOffset !== b.numericOffset) {
|
||||
return a.numericOffset - b.numericOffset;
|
||||
}
|
||||
return a.value.localeCompare(b.value);
|
||||
});
|
||||
if (
|
||||
preferredZone &&
|
||||
!options.some((option) => option.value === preferredZone)
|
||||
) {
|
||||
const currentOffset = getTimeZoneOffsetMinutes(new Date(), preferredZone);
|
||||
options.push({
|
||||
value: preferredZone,
|
||||
label: `${preferredZone} (now ${formatOffset(currentOffset)})`,
|
||||
numericOffset: currentOffset,
|
||||
});
|
||||
options.sort((a, b) => {
|
||||
if (a.numericOffset !== b.numericOffset) {
|
||||
return a.numericOffset - b.numericOffset;
|
||||
}
|
||||
return a.value.localeCompare(b.value);
|
||||
});
|
||||
}
|
||||
return options;
|
||||
};
|
||||
|
||||
const getDefaultTimeZone = () => {
|
||||
try {
|
||||
return Intl.DateTimeFormat().resolvedOptions().timeZone || 'UTC';
|
||||
} catch (error) {
|
||||
return 'UTC';
|
||||
}
|
||||
};
|
||||
|
||||
const SettingsPage = () => {
|
||||
const settings = useSettingsStore((s) => s.settings);
|
||||
const userAgents = useUserAgentsStore((s) => s.userAgents);
|
||||
|
|
@ -80,11 +222,53 @@ const SettingsPage = () => {
|
|||
const tmdbSetting = settings['tmdb-api-key'];
|
||||
const [tmdbKey, setTmdbKey] = useState('');
|
||||
const [savingTmdbKey, setSavingTmdbKey] = useState(false);
|
||||
// Store pending changed settings when showing the dialog
|
||||
const [pendingChangedSettings, setPendingChangedSettings] = useState(null);
|
||||
const [comskipFile, setComskipFile] = useState(null);
|
||||
const [comskipUploadLoading, setComskipUploadLoading] = useState(false);
|
||||
const [comskipConfig, setComskipConfig] = useState({
|
||||
path: '',
|
||||
exists: false,
|
||||
});
|
||||
|
||||
// UI / local storage settings
|
||||
const [tableSize, setTableSize] = useLocalStorage('table-size', 'default');
|
||||
const [timeFormat, setTimeFormat] = useLocalStorage('time-format', '12h');
|
||||
const [dateFormat, setDateFormat] = useLocalStorage('date-format', 'mdy');
|
||||
const [timeZone, setTimeZone] = useLocalStorage(
|
||||
'time-zone',
|
||||
getDefaultTimeZone()
|
||||
);
|
||||
const timeZoneOptions = useMemo(
|
||||
() => buildTimeZoneOptions(timeZone),
|
||||
[timeZone]
|
||||
);
|
||||
const timeZoneSyncedRef = useRef(false);
|
||||
|
||||
const persistTimeZoneSetting = useCallback(
|
||||
async (tzValue) => {
|
||||
try {
|
||||
const existing = settings['system-time-zone'];
|
||||
if (existing && existing.id) {
|
||||
await API.updateSetting({ ...existing, value: tzValue });
|
||||
} else {
|
||||
await API.createSetting({
|
||||
key: 'system-time-zone',
|
||||
name: 'System Time Zone',
|
||||
value: tzValue,
|
||||
});
|
||||
}
|
||||
} catch (error) {
|
||||
console.error('Failed to persist time zone setting', error);
|
||||
notifications.show({
|
||||
title: 'Failed to update time zone',
|
||||
message: 'Could not save the selected time zone. Please try again.',
|
||||
color: 'red',
|
||||
});
|
||||
}
|
||||
},
|
||||
[settings]
|
||||
);
|
||||
|
||||
const regionChoices = REGION_CHOICES;
|
||||
|
||||
|
|
@ -101,6 +285,7 @@ const SettingsPage = () => {
|
|||
'dvr-tv-fallback-template': '',
|
||||
'dvr-movie-fallback-template': '',
|
||||
'dvr-comskip-enabled': false,
|
||||
'dvr-comskip-custom-path': '',
|
||||
'dvr-pre-offset-minutes': 0,
|
||||
'dvr-post-offset-minutes': 0,
|
||||
},
|
||||
|
|
@ -179,6 +364,12 @@ const SettingsPage = () => {
|
|||
);
|
||||
|
||||
form.setValues(formValues);
|
||||
if (formValues['dvr-comskip-custom-path']) {
|
||||
setComskipConfig((prev) => ({
|
||||
path: formValues['dvr-comskip-custom-path'],
|
||||
exists: prev.exists,
|
||||
}));
|
||||
}
|
||||
|
||||
const networkAccessSettings = JSON.parse(
|
||||
settings['network-access'].value || '{}'
|
||||
|
|
@ -198,8 +389,39 @@ const SettingsPage = () => {
|
|||
console.error('Error parsing proxy settings:', error);
|
||||
}
|
||||
}
|
||||
|
||||
const tzSetting = settings['system-time-zone'];
|
||||
if (tzSetting?.value) {
|
||||
timeZoneSyncedRef.current = true;
|
||||
setTimeZone((prev) =>
|
||||
prev === tzSetting.value ? prev : tzSetting.value
|
||||
);
|
||||
} else if (!timeZoneSyncedRef.current && timeZone) {
|
||||
timeZoneSyncedRef.current = true;
|
||||
persistTimeZoneSetting(timeZone);
|
||||
}
|
||||
}
|
||||
}, [settings]);
|
||||
}, [settings, timeZone, setTimeZone, persistTimeZoneSetting]);
|
||||
|
||||
useEffect(() => {
|
||||
const loadComskipConfig = async () => {
|
||||
try {
|
||||
const response = await API.getComskipConfig();
|
||||
if (response) {
|
||||
setComskipConfig({
|
||||
path: response.path || '',
|
||||
exists: Boolean(response.exists),
|
||||
});
|
||||
if (response.path) {
|
||||
form.setFieldValue('dvr-comskip-custom-path', response.path);
|
||||
}
|
||||
}
|
||||
} catch (error) {
|
||||
console.error('Failed to load comskip config', error);
|
||||
}
|
||||
};
|
||||
loadComskipConfig();
|
||||
}, []);
|
||||
|
||||
useEffect(() => {
|
||||
if (authUser?.user_level === USER_LEVELS.ADMIN) {
|
||||
|
|
@ -237,6 +459,8 @@ const SettingsPage = () => {
|
|||
|
||||
// If M3U hash key changed, show warning (unless suppressed)
|
||||
if (m3uHashKeyChanged && !isWarningSuppressed('rehash-streams')) {
|
||||
// Store the changed settings before showing dialog
|
||||
setPendingChangedSettings(changedSettings);
|
||||
setRehashDialogType('save'); // Set dialog type to save
|
||||
setRehashConfirmOpen(true);
|
||||
return;
|
||||
|
|
@ -397,6 +621,39 @@ const SettingsPage = () => {
|
|||
setProxySettingsSaved(true);
|
||||
};
|
||||
|
||||
const onComskipUpload = async () => {
|
||||
if (!comskipFile) {
|
||||
return;
|
||||
}
|
||||
|
||||
setComskipUploadLoading(true);
|
||||
try {
|
||||
const response = await API.uploadComskipIni(comskipFile);
|
||||
if (response?.path) {
|
||||
notifications.show({
|
||||
title: 'comskip.ini uploaded',
|
||||
message: response.path,
|
||||
autoClose: 3000,
|
||||
color: 'green',
|
||||
});
|
||||
form.setFieldValue('dvr-comskip-custom-path', response.path);
|
||||
useSettingsStore.getState().updateSetting({
|
||||
...(settings['dvr-comskip-custom-path'] || {
|
||||
key: 'dvr-comskip-custom-path',
|
||||
name: 'DVR Comskip Custom Path',
|
||||
}),
|
||||
value: response.path,
|
||||
});
|
||||
setComskipConfig({ path: response.path, exists: true });
|
||||
}
|
||||
} catch (error) {
|
||||
console.error('Failed to upload comskip.ini', error);
|
||||
} finally {
|
||||
setComskipUploadLoading(false);
|
||||
setComskipFile(null);
|
||||
}
|
||||
};
|
||||
|
||||
const resetProxySettingsToDefaults = () => {
|
||||
const defaultValues = {
|
||||
buffering_timeout: 15,
|
||||
|
|
@ -430,13 +687,19 @@ const SettingsPage = () => {
|
|||
const onUISettingsChange = (name, value) => {
|
||||
switch (name) {
|
||||
case 'table-size':
|
||||
setTableSize(value);
|
||||
if (value) setTableSize(value);
|
||||
break;
|
||||
case 'time-format':
|
||||
setTimeFormat(value);
|
||||
if (value) setTimeFormat(value);
|
||||
break;
|
||||
case 'date-format':
|
||||
setDateFormat(value);
|
||||
if (value) setDateFormat(value);
|
||||
break;
|
||||
case 'time-zone':
|
||||
if (value) {
|
||||
setTimeZone(value);
|
||||
persistTimeZoneSetting(value);
|
||||
}
|
||||
break;
|
||||
}
|
||||
};
|
||||
|
|
@ -444,23 +707,28 @@ const SettingsPage = () => {
|
|||
const executeSettingsSaveAndRehash = async () => {
|
||||
setRehashConfirmOpen(false);
|
||||
|
||||
// First save the settings
|
||||
const values = form.getValues();
|
||||
const changedSettings = {};
|
||||
// Use the stored pending values that were captured before the dialog was shown
|
||||
const changedSettings = pendingChangedSettings || {};
|
||||
|
||||
for (const settingKey in values) {
|
||||
if (String(values[settingKey]) !== String(settings[settingKey].value)) {
|
||||
changedSettings[settingKey] = `${values[settingKey]}`;
|
||||
// Update each changed setting in the backend (create if missing)
|
||||
for (const updatedKey in changedSettings) {
|
||||
const existing = settings[updatedKey];
|
||||
if (existing && existing.id) {
|
||||
await API.updateSetting({
|
||||
...existing,
|
||||
value: changedSettings[updatedKey],
|
||||
});
|
||||
} else {
|
||||
await API.createSetting({
|
||||
key: updatedKey,
|
||||
name: updatedKey.replace(/-/g, ' '),
|
||||
value: changedSettings[updatedKey],
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
// Update each changed setting in the backend
|
||||
for (const updatedKey in changedSettings) {
|
||||
await API.updateSetting({
|
||||
...settings[updatedKey],
|
||||
value: changedSettings[updatedKey],
|
||||
});
|
||||
}
|
||||
// Clear the pending values
|
||||
setPendingChangedSettings(null);
|
||||
};
|
||||
|
||||
const executeRehashStreamsOnly = async () => {
|
||||
|
|
@ -540,7 +808,7 @@ const SettingsPage = () => {
|
|||
data={[
|
||||
{
|
||||
value: '12h',
|
||||
label: '12h hour time',
|
||||
label: '12 hour time',
|
||||
},
|
||||
{
|
||||
value: '24h',
|
||||
|
|
@ -563,6 +831,14 @@ const SettingsPage = () => {
|
|||
},
|
||||
]}
|
||||
/>
|
||||
<Select
|
||||
label="Time zone"
|
||||
searchable
|
||||
nothingFoundMessage="No matches"
|
||||
value={timeZone}
|
||||
onChange={(val) => onUISettingsChange('time-zone', val)}
|
||||
data={timeZoneOptions}
|
||||
/>
|
||||
</Accordion.Panel>
|
||||
</Accordion.Item>
|
||||
|
||||
|
|
@ -707,6 +983,46 @@ const SettingsPage = () => {
|
|||
'dvr-comskip-enabled'
|
||||
}
|
||||
/>
|
||||
<TextInput
|
||||
label="Custom comskip.ini path"
|
||||
description="Leave blank to use the built-in defaults."
|
||||
placeholder="/app/docker/comskip.ini"
|
||||
{...form.getInputProps('dvr-comskip-custom-path')}
|
||||
key={form.key('dvr-comskip-custom-path')}
|
||||
id={
|
||||
settings['dvr-comskip-custom-path']?.id ||
|
||||
'dvr-comskip-custom-path'
|
||||
}
|
||||
name={
|
||||
settings['dvr-comskip-custom-path']?.key ||
|
||||
'dvr-comskip-custom-path'
|
||||
}
|
||||
/>
|
||||
<Group align="flex-end" gap="sm">
|
||||
<FileInput
|
||||
placeholder="Select comskip.ini"
|
||||
accept=".ini"
|
||||
value={comskipFile}
|
||||
onChange={setComskipFile}
|
||||
clearable
|
||||
disabled={comskipUploadLoading}
|
||||
style={{ flex: 1 }}
|
||||
/>
|
||||
<Button
|
||||
variant="light"
|
||||
onClick={onComskipUpload}
|
||||
disabled={!comskipFile || comskipUploadLoading}
|
||||
>
|
||||
{comskipUploadLoading
|
||||
? 'Uploading...'
|
||||
: 'Upload comskip.ini'}
|
||||
</Button>
|
||||
</Group>
|
||||
<Text size="xs" c="dimmed">
|
||||
{comskipConfig.exists && comskipConfig.path
|
||||
? `Using ${comskipConfig.path}`
|
||||
: 'No custom comskip.ini uploaded.'}
|
||||
</Text>
|
||||
<NumberInput
|
||||
label="Start early (minutes)"
|
||||
description="Begin recording this many minutes before the scheduled start."
|
||||
|
|
@ -909,6 +1225,10 @@ const SettingsPage = () => {
|
|||
value: 'tvg_id',
|
||||
label: 'TVG-ID',
|
||||
},
|
||||
{
|
||||
value: 'm3u_id',
|
||||
label: 'M3U ID',
|
||||
},
|
||||
]}
|
||||
{...form.getInputProps('m3u-hash-key')}
|
||||
key={form.key('m3u-hash-key')}
|
||||
|
|
@ -1141,6 +1461,8 @@ const SettingsPage = () => {
|
|||
onClose={() => {
|
||||
setRehashConfirmOpen(false);
|
||||
setRehashDialogType(null);
|
||||
// Clear pending values when dialog is cancelled
|
||||
setPendingChangedSettings(null);
|
||||
}}
|
||||
onConfirm={handleRehashConfirm}
|
||||
title={
|
||||
|
|
|
|||
100
frontend/src/pages/__tests__/guideUtils.test.js
Normal file
|
|
@ -0,0 +1,100 @@
|
|||
import { describe, it, expect } from 'vitest';
|
||||
import dayjs from 'dayjs';
|
||||
import {
|
||||
PROGRAM_HEIGHT,
|
||||
EXPANDED_PROGRAM_HEIGHT,
|
||||
buildChannelIdMap,
|
||||
mapProgramsByChannel,
|
||||
computeRowHeights,
|
||||
} from '../guideUtils.js';
|
||||
|
||||
describe('guideUtils', () => {
|
||||
describe('buildChannelIdMap', () => {
|
||||
it('maps tvg ids from epg records and falls back to channel uuid', () => {
|
||||
const channels = [
|
||||
{ id: 1, epg_data_id: 'epg-1', uuid: 'uuid-1' },
|
||||
{ id: 2, epg_data_id: null, uuid: 'uuid-2' },
|
||||
];
|
||||
const tvgsById = {
|
||||
'epg-1': { tvg_id: 'alpha' },
|
||||
};
|
||||
|
||||
const map = buildChannelIdMap(channels, tvgsById);
|
||||
|
||||
expect(map.get('alpha')).toBe(1);
|
||||
expect(map.get('uuid-2')).toBe(2);
|
||||
});
|
||||
});
|
||||
|
||||
describe('mapProgramsByChannel', () => {
|
||||
it('groups programs by channel and sorts them by start time', () => {
|
||||
const programs = [
|
||||
{
|
||||
id: 10,
|
||||
tvg_id: 'alpha',
|
||||
start_time: dayjs('2025-01-01T02:00:00Z').toISOString(),
|
||||
end_time: dayjs('2025-01-01T03:00:00Z').toISOString(),
|
||||
title: 'Late Show',
|
||||
},
|
||||
{
|
||||
id: 11,
|
||||
tvg_id: 'alpha',
|
||||
start_time: dayjs('2025-01-01T01:00:00Z').toISOString(),
|
||||
end_time: dayjs('2025-01-01T02:00:00Z').toISOString(),
|
||||
title: 'Evening News',
|
||||
},
|
||||
{
|
||||
id: 20,
|
||||
tvg_id: 'beta',
|
||||
start_time: dayjs('2025-01-01T00:00:00Z').toISOString(),
|
||||
end_time: dayjs('2025-01-01T01:00:00Z').toISOString(),
|
||||
title: 'Morning Show',
|
||||
},
|
||||
];
|
||||
|
||||
const channelIdByTvgId = new Map([
|
||||
['alpha', 1],
|
||||
['beta', 2],
|
||||
]);
|
||||
|
||||
const map = mapProgramsByChannel(programs, channelIdByTvgId);
|
||||
|
||||
expect(map.get(1)).toHaveLength(2);
|
||||
expect(map.get(1)?.map((item) => item.id)).toEqual([11, 10]);
|
||||
expect(map.get(2)).toHaveLength(1);
|
||||
expect(map.get(2)?.[0].startMs).toBeTypeOf('number');
|
||||
expect(map.get(2)?.[0].endMs).toBeTypeOf('number');
|
||||
});
|
||||
});
|
||||
|
||||
describe('computeRowHeights', () => {
|
||||
it('returns program heights with expanded rows when needed', () => {
|
||||
const filteredChannels = [
|
||||
{ id: 1 },
|
||||
{ id: 2 },
|
||||
];
|
||||
|
||||
const programsByChannel = new Map([
|
||||
[1, [{ id: 10 }, { id: 11 }]],
|
||||
[2, [{ id: 20 }]],
|
||||
]);
|
||||
|
||||
const collapsed = computeRowHeights(
|
||||
filteredChannels,
|
||||
programsByChannel,
|
||||
null
|
||||
);
|
||||
expect(collapsed).toEqual([PROGRAM_HEIGHT, PROGRAM_HEIGHT]);
|
||||
|
||||
const expanded = computeRowHeights(
|
||||
filteredChannels,
|
||||
programsByChannel,
|
||||
10
|
||||
);
|
||||
expect(expanded).toEqual([
|
||||
EXPANDED_PROGRAM_HEIGHT,
|
||||
PROGRAM_HEIGHT,
|
||||
]);
|
||||
});
|
||||
});
|
||||
});
|
||||
79
frontend/src/pages/guideUtils.js
Normal file
|
|
@ -0,0 +1,79 @@
|
|||
import dayjs from 'dayjs';
|
||||
|
||||
export const PROGRAM_HEIGHT = 90;
|
||||
export const EXPANDED_PROGRAM_HEIGHT = 180;
|
||||
|
||||
export function buildChannelIdMap(channels, tvgsById) {
|
||||
const map = new Map();
|
||||
channels.forEach((channel) => {
|
||||
const tvgRecord = channel.epg_data_id
|
||||
? tvgsById[channel.epg_data_id]
|
||||
: null;
|
||||
const tvgId = tvgRecord?.tvg_id ?? channel.uuid;
|
||||
if (tvgId) {
|
||||
const tvgKey = String(tvgId);
|
||||
if (!map.has(tvgKey)) {
|
||||
map.set(tvgKey, []);
|
||||
}
|
||||
map.get(tvgKey).push(channel.id);
|
||||
}
|
||||
});
|
||||
return map;
|
||||
}
|
||||
|
||||
export function mapProgramsByChannel(programs, channelIdByTvgId) {
|
||||
if (!programs?.length || !channelIdByTvgId?.size) {
|
||||
return new Map();
|
||||
}
|
||||
|
||||
const map = new Map();
|
||||
programs.forEach((program) => {
|
||||
const channelIds = channelIdByTvgId.get(String(program.tvg_id));
|
||||
if (!channelIds || channelIds.length === 0) {
|
||||
return;
|
||||
}
|
||||
|
||||
const startMs = program.startMs ?? dayjs(program.start_time).valueOf();
|
||||
const endMs = program.endMs ?? dayjs(program.end_time).valueOf();
|
||||
|
||||
const programData = {
|
||||
...program,
|
||||
startMs,
|
||||
endMs,
|
||||
};
|
||||
|
||||
// Add this program to all channels that share the same TVG ID
|
||||
channelIds.forEach((channelId) => {
|
||||
if (!map.has(channelId)) {
|
||||
map.set(channelId, []);
|
||||
}
|
||||
map.get(channelId).push(programData);
|
||||
});
|
||||
});
|
||||
|
||||
map.forEach((list) => {
|
||||
list.sort((a, b) => a.startMs - b.startMs);
|
||||
});
|
||||
|
||||
return map;
|
||||
}
|
||||
|
||||
export function computeRowHeights(
|
||||
filteredChannels,
|
||||
programsByChannelId,
|
||||
expandedProgramId,
|
||||
defaultHeight = PROGRAM_HEIGHT,
|
||||
expandedHeight = EXPANDED_PROGRAM_HEIGHT
|
||||
) {
|
||||
if (!filteredChannels?.length) {
|
||||
return [];
|
||||
}
|
||||
|
||||
return filteredChannels.map((channel) => {
|
||||
const channelPrograms = programsByChannelId.get(channel.id) || [];
|
||||
const expanded = channelPrograms.some(
|
||||
(program) => program.id === expandedProgramId
|
||||
);
|
||||
return expanded ? expandedHeight : defaultHeight;
|
||||
});
|
||||
}
|
||||
|
|
@ -15,6 +15,7 @@ const useChannelsStore = create((set, get) => ({
|
|||
activeChannels: {},
|
||||
activeClients: {},
|
||||
recordings: [],
|
||||
recurringRules: [],
|
||||
isLoading: false,
|
||||
error: null,
|
||||
forceUpdate: 0,
|
||||
|
|
@ -408,6 +409,23 @@ const useChannelsStore = create((set, get) => ({
|
|||
}
|
||||
},
|
||||
|
||||
fetchRecurringRules: async () => {
|
||||
try {
|
||||
const rules = await api.listRecurringRules();
|
||||
set({ recurringRules: Array.isArray(rules) ? rules : [] });
|
||||
} catch (error) {
|
||||
console.error('Failed to fetch recurring DVR rules:', error);
|
||||
set({ error: 'Failed to load recurring DVR rules.' });
|
||||
}
|
||||
},
|
||||
|
||||
removeRecurringRule: (id) =>
|
||||
set((state) => ({
|
||||
recurringRules: Array.isArray(state.recurringRules)
|
||||
? state.recurringRules.filter((rule) => String(rule?.id) !== String(id))
|
||||
: [],
|
||||
})),
|
||||
|
||||
// Optimistically remove a single recording from the local store
|
||||
removeRecording: (id) =>
|
||||
set((state) => {
|
||||
|
|
|
|||
|
|
@ -3,7 +3,7 @@ import api from '../api';
|
|||
|
||||
const useLogosStore = create((set, get) => ({
|
||||
logos: {},
|
||||
channelLogos: {}, // Separate state for channel-assignable logos
|
||||
channelLogos: {}, // Keep this for simplicity, but we'll be more careful about when we populate it
|
||||
isLoading: false,
|
||||
backgroundLoading: false,
|
||||
hasLoadedAll: false, // Track if we've loaded all logos
|
||||
|
|
@ -21,12 +21,29 @@ const useLogosStore = create((set, get) => ({
|
|||
},
|
||||
|
||||
addLogo: (newLogo) =>
|
||||
set((state) => ({
|
||||
logos: {
|
||||
set((state) => {
|
||||
// Add to main logos store always
|
||||
const newLogos = {
|
||||
...state.logos,
|
||||
[newLogo.id]: { ...newLogo },
|
||||
},
|
||||
})),
|
||||
};
|
||||
|
||||
// Add to channelLogos if the user has loaded channel-assignable logos
|
||||
// This means they're using channel forms and the new logo should be available there
|
||||
// Newly created logos are channel-assignable (they start unused)
|
||||
let newChannelLogos = state.channelLogos;
|
||||
if (state.hasLoadedChannelLogos) {
|
||||
newChannelLogos = {
|
||||
...state.channelLogos,
|
||||
[newLogo.id]: { ...newLogo },
|
||||
};
|
||||
}
|
||||
|
||||
return {
|
||||
logos: newLogos,
|
||||
channelLogos: newChannelLogos,
|
||||
};
|
||||
}),
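A short sketch of how the updated action behaves when called through the store; the logo object is hypothetical.
// Illustrative only (not part of this change); the logo object below is hypothetical.
const { addLogo } = useLogosStore.getState();
addLogo({ id: 42, name: 'Example Logo', cache_url: '/logos/42.png' });
// -> always written to state.logos[42]
// -> mirrored into state.channelLogos[42] only when hasLoadedChannelLogos is true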
|
||||
|
||||
updateLogo: (logo) =>
|
||||
set((state) => ({
|
||||
|
|
@ -34,13 +51,25 @@ const useLogosStore = create((set, get) => ({
|
|||
...state.logos,
|
||||
[logo.id]: { ...logo },
|
||||
},
|
||||
// Update in channelLogos if it exists there
|
||||
channelLogos: state.channelLogos[logo.id]
|
||||
? {
|
||||
...state.channelLogos,
|
||||
[logo.id]: { ...logo },
|
||||
}
|
||||
: state.channelLogos,
|
||||
})),
|
||||
|
||||
removeLogo: (logoId) =>
|
||||
set((state) => {
|
||||
const newLogos = { ...state.logos };
|
||||
const newChannelLogos = { ...state.channelLogos };
|
||||
delete newLogos[logoId];
|
||||
return { logos: newLogos };
|
||||
delete newChannelLogos[logoId];
|
||||
return {
|
||||
logos: newLogos,
|
||||
channelLogos: newChannelLogos,
|
||||
};
|
||||
}),
|
||||
|
||||
// Smart loading methods
|
||||
|
|
@ -155,8 +184,15 @@ const useLogosStore = create((set, get) => ({
|
|||
|
||||
console.log(`Fetched ${logos.length} channel-assignable logos`);
|
||||
|
||||
// Store in separate channelLogos state
|
||||
// Store in both places, but this is intentional and only when specifically requested
|
||||
set({
|
||||
logos: {
|
||||
...get().logos, // Keep existing logos
|
||||
...logos.reduce((acc, logo) => {
|
||||
acc[logo.id] = { ...logo };
|
||||
return acc;
|
||||
}, {}),
|
||||
},
|
||||
channelLogos: logos.reduce((acc, logo) => {
|
||||
acc[logo.id] = { ...logo };
|
||||
return acc;
|
||||
|
|
|
|||
42
frontend/src/test/setupTests.js
Normal file
|
|
@ -0,0 +1,42 @@
|
|||
import '@testing-library/jest-dom/vitest';
|
||||
import { afterEach, vi } from 'vitest';
|
||||
import { cleanup } from '@testing-library/react';
|
||||
|
||||
afterEach(() => {
|
||||
cleanup();
|
||||
});
|
||||
|
||||
if (typeof window !== 'undefined' && !window.matchMedia) {
|
||||
window.matchMedia = vi.fn().mockImplementation((query) => ({
|
||||
matches: false,
|
||||
media: query,
|
||||
onchange: null,
|
||||
addListener: vi.fn(),
|
||||
removeListener: vi.fn(),
|
||||
addEventListener: vi.fn(),
|
||||
removeEventListener: vi.fn(),
|
||||
dispatchEvent: vi.fn(),
|
||||
}));
|
||||
}
|
||||
|
||||
if (typeof window !== 'undefined' && !window.ResizeObserver) {
|
||||
class ResizeObserver {
|
||||
constructor(callback) {
|
||||
this.callback = callback;
|
||||
}
|
||||
observe() {}
|
||||
unobserve() {}
|
||||
disconnect() {}
|
||||
}
|
||||
|
||||
window.ResizeObserver = ResizeObserver;
|
||||
}
|
||||
|
||||
if (typeof window !== 'undefined') {
|
||||
if (!window.requestAnimationFrame) {
|
||||
window.requestAnimationFrame = (cb) => setTimeout(cb, 16);
|
||||
}
|
||||
if (!window.cancelAnimationFrame) {
|
||||
window.cancelAnimationFrame = (id) => clearTimeout(id);
|
||||
}
|
||||
}
|
||||
|
|
@ -26,4 +26,10 @@ export default defineConfig({
    // },
    // },
  },

  test: {
    environment: 'jsdom',
    setupFiles: ['./src/test/setupTests.js'],
    globals: true,
  },
});
|
||||
|
|
|
|||
|
|
@ -1,182 +0,0 @@
# ml_model.py

import sys
import json
import re
import os
import logging

from rapidfuzz import fuzz
from sentence_transformers import util
from sentence_transformers import SentenceTransformer as st

# Set up logger
logger = logging.getLogger(__name__)

# Load the sentence-transformers model once at the module level
SENTENCE_MODEL_NAME = "sentence-transformers/all-MiniLM-L6-v2"
MODEL_PATH = os.path.join("/app", "models", "all-MiniLM-L6-v2")

# Thresholds
BEST_FUZZY_THRESHOLD = 85
LOWER_FUZZY_THRESHOLD = 40
EMBED_SIM_THRESHOLD = 0.65

def process_data(input_data):
    os.makedirs(MODEL_PATH, exist_ok=True)

    # If not present locally, download:
    if not os.path.exists(os.path.join(MODEL_PATH, "config.json")):
        logger.info(f"Local model not found in {MODEL_PATH}; downloading from {SENTENCE_MODEL_NAME}...")
        st_model = st(SENTENCE_MODEL_NAME, cache_folder=MODEL_PATH)
    else:
        logger.info(f"Loading local model from {MODEL_PATH}")
        st_model = st(MODEL_PATH)

    channels = input_data["channels"]
    epg_data = input_data["epg_data"]
    region_code = input_data.get("region_code", None)

    epg_embeddings = None
    if any(row["norm_name"] for row in epg_data):
        epg_embeddings = st_model.encode(
            [row["norm_name"] for row in epg_data],
            convert_to_tensor=True
        )

    channels_to_update = []
    matched_channels = []

    for chan in channels:
        normalized_tvg_id = chan.get("tvg_id", "")
        fallback_name = chan["tvg_id"].strip() if chan["tvg_id"] else chan["name"]

        # Exact TVG ID match (direct match)
        epg_by_tvg_id = next((epg for epg in epg_data if epg["tvg_id"] == normalized_tvg_id), None)
        if normalized_tvg_id and epg_by_tvg_id:
            chan["epg_data_id"] = epg_by_tvg_id["id"]
            channels_to_update.append(chan)

            # Add to matched_channels list so it's counted in the total
            matched_channels.append((chan['id'], fallback_name, epg_by_tvg_id["tvg_id"]))

            logger.info(f"Channel {chan['id']} '{fallback_name}' => EPG found by tvg_id={epg_by_tvg_id['tvg_id']}")
            continue

        # If channel has a tvg_id that doesn't exist in EPGData, do direct check.
        # I don't THINK this should happen now that we assign EPG on channel creation.
        if chan["tvg_id"]:
            epg_match = [epg["id"] for epg in epg_data if epg["tvg_id"] == chan["tvg_id"]]
            if epg_match:
                chan["epg_data_id"] = epg_match[0]
                logger.info(f"Channel {chan['id']} '{chan['name']}' => EPG found by tvg_id={chan['tvg_id']}")
                channels_to_update.append(chan)
                continue

        # C) Perform name-based fuzzy matching
        if not chan["norm_chan"]:
            logger.debug(f"Channel {chan['id']} '{chan['name']}' => empty after normalization, skipping")
            continue

        best_score = 0
        best_epg = None
        for row in epg_data:
            if not row["norm_name"]:
                continue

            base_score = fuzz.ratio(chan["norm_chan"], row["norm_name"])
            bonus = 0
            # Region-based bonus/penalty
            combined_text = row["tvg_id"].lower() + " " + row["name"].lower()
            dot_regions = re.findall(r'\.([a-z]{2})', combined_text)
            if region_code:
                if dot_regions:
                    if region_code in dot_regions:
                        bonus = 30 # bigger bonus if .us or .ca matches
                    else:
                        bonus = -15
                elif region_code in combined_text:
                    bonus = 15
            score = base_score + bonus

            logger.debug(
                f"Channel {chan['id']} '{fallback_name}' => EPG row {row['id']}: "
                f"name='{row['name']}', norm_name='{row['norm_name']}', "
                f"combined_text='{combined_text}', dot_regions={dot_regions}, "
                f"base_score={base_score}, bonus={bonus}, total_score={score}"
            )

            if score > best_score:
                best_score = score
                best_epg = row

        # If no best match was found, skip
        if not best_epg:
            logger.debug(f"Channel {chan['id']} '{fallback_name}' => no EPG match at all.")
            continue

        # If best_score is above BEST_FUZZY_THRESHOLD => direct accept
        if best_score >= BEST_FUZZY_THRESHOLD:
            chan["epg_data_id"] = best_epg["id"]
            channels_to_update.append(chan)

            matched_channels.append((chan['id'], fallback_name, best_epg["tvg_id"]))
            logger.info(
                f"Channel {chan['id']} '{fallback_name}' => matched tvg_id={best_epg['tvg_id']} "
                f"(score={best_score})"
            )

        # If best_score is in the “middle range,” do embedding check
        elif best_score >= LOWER_FUZZY_THRESHOLD and epg_embeddings is not None:
            chan_embedding = st_model.encode(chan["norm_chan"], convert_to_tensor=True)
            sim_scores = util.cos_sim(chan_embedding, epg_embeddings)[0]
            top_index = int(sim_scores.argmax())
            top_value = float(sim_scores[top_index])
            if top_value >= EMBED_SIM_THRESHOLD:
                matched_epg = epg_data[top_index]
                chan["epg_data_id"] = matched_epg["id"]
                channels_to_update.append(chan)

                matched_channels.append((chan['id'], fallback_name, matched_epg["tvg_id"]))
                logger.info(
                    f"Channel {chan['id']} '{fallback_name}' => matched EPG tvg_id={matched_epg['tvg_id']} "
                    f"(fuzzy={best_score}, cos-sim={top_value:.2f})"
                )
            else:
                logger.info(
                    f"Channel {chan['id']} '{fallback_name}' => fuzzy={best_score}, "
                    f"cos-sim={top_value:.2f} < {EMBED_SIM_THRESHOLD}, skipping"
                )
        else:
            # No good match found - fuzzy score is too low
            logger.info(
                f"Channel {chan['id']} '{fallback_name}' => best fuzzy match score={best_score} < {LOWER_FUZZY_THRESHOLD}, skipping"
            )

    return {
        "channels_to_update": channels_to_update,
        "matched_channels": matched_channels
    }

def main():
    # Configure logging
    logging_level = os.environ.get('DISPATCHARR_LOG_LEVEL', 'INFO')
    logging.basicConfig(
        level=getattr(logging, logging_level),
        format='%(asctime)s - %(name)s - %(levelname)s - %(message)s',
        stream=sys.stderr
    )

    # Read input data from a file
    input_file_path = sys.argv[1]
    with open(input_file_path, 'r') as f:
        input_data = json.load(f)

    # Process data with the ML model (or your logic)
    result = process_data(input_data)

    # Output result to stdout
    print(json.dumps(result))

if __name__ == "__main__":
    main()
@ -1,5 +1,5 @@
"""
Dispatcharr version information.
"""
__version__ = '0.9.1' # Follow semantic versioning (MAJOR.MINOR.PATCH)
__version__ = '0.10.3' # Follow semantic versioning (MAJOR.MINOR.PATCH)
__timestamp__ = None # Set during CI/CD build process