Mirror of https://github.com/Dispatcharr/Dispatcharr.git (synced 2026-01-23 02:35:14 +00:00)

Commit 097551ccf7: Merge branch 'dev' into enhancement/component-cleanup
47 changed files with 5186 additions and 784 deletions

@ -31,3 +31,4 @@
 LICENSE
 README.md
 data/
+docker/data/

87  .github/workflows/base-image.yml (vendored)

@ -101,6 +101,28 @@ jobs:
           username: ${{ secrets.DOCKERHUB_USERNAME }}
           password: ${{ secrets.DOCKERHUB_TOKEN }}
 
+      - name: Extract metadata for Docker
+        id: meta
+        uses: docker/metadata-action@v5
+        with:
+          images: |
+            ghcr.io/${{ needs.prepare.outputs.repo_owner }}/${{ needs.prepare.outputs.repo_name }}
+            docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${{ needs.prepare.outputs.repo_name }}
+          labels: |
+            org.opencontainers.image.title=${{ needs.prepare.outputs.repo_name }}
+            org.opencontainers.image.description=Your ultimate IPTV & stream Management companion.
+            org.opencontainers.image.url=https://github.com/${{ github.repository }}
+            org.opencontainers.image.source=https://github.com/${{ github.repository }}
+            org.opencontainers.image.version=${{ needs.prepare.outputs.branch_tag }}-${{ needs.prepare.outputs.timestamp }}
+            org.opencontainers.image.created=${{ needs.prepare.outputs.timestamp }}
+            org.opencontainers.image.revision=${{ github.sha }}
+            org.opencontainers.image.licenses=See repository
+            org.opencontainers.image.documentation=https://dispatcharr.github.io/Dispatcharr-Docs/
+            org.opencontainers.image.vendor=${{ needs.prepare.outputs.repo_owner }}
+            org.opencontainers.image.authors=${{ github.actor }}
+            maintainer=${{ github.actor }}
+            build_version=DispatcharrBase version: ${{ needs.prepare.outputs.branch_tag }}-${{ needs.prepare.outputs.timestamp }}
+
       - name: Build and push Docker base image
         uses: docker/build-push-action@v4
         with:

@ -113,6 +135,7 @@ jobs:
             ghcr.io/${{ needs.prepare.outputs.repo_owner }}/${{ needs.prepare.outputs.repo_name }}:${{ needs.prepare.outputs.branch_tag }}-${{ needs.prepare.outputs.timestamp }}-${{ matrix.platform }}
             docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${{ needs.prepare.outputs.repo_name }}:${{ needs.prepare.outputs.branch_tag }}-${{ matrix.platform }}
             docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${{ needs.prepare.outputs.repo_name }}:${{ needs.prepare.outputs.branch_tag }}-${{ needs.prepare.outputs.timestamp }}-${{ matrix.platform }}
+          labels: ${{ steps.meta.outputs.labels }}
           build-args: |
             REPO_OWNER=${{ needs.prepare.outputs.repo_owner }}
             REPO_NAME=${{ needs.prepare.outputs.repo_name }}

@ -154,18 +177,74 @@ jobs:
 
           # GitHub Container Registry manifests
           # branch tag (e.g. base or base-dev)
-          docker buildx imagetools create --tag ghcr.io/${OWNER}/${REPO}:${BRANCH_TAG} \
+          docker buildx imagetools create \
+            --annotation "index:org.opencontainers.image.title=${{ needs.prepare.outputs.repo_name }}" \
+            --annotation "index:org.opencontainers.image.description=Your ultimate IPTV & stream Management companion." \
+            --annotation "index:org.opencontainers.image.url=https://github.com/${{ github.repository }}" \
+            --annotation "index:org.opencontainers.image.source=https://github.com/${{ github.repository }}" \
+            --annotation "index:org.opencontainers.image.version=${BRANCH_TAG}-${TIMESTAMP}" \
+            --annotation "index:org.opencontainers.image.created=${TIMESTAMP}" \
+            --annotation "index:org.opencontainers.image.revision=${{ github.sha }}" \
+            --annotation "index:org.opencontainers.image.licenses=See repository" \
+            --annotation "index:org.opencontainers.image.documentation=https://dispatcharr.github.io/Dispatcharr-Docs/" \
+            --annotation "index:org.opencontainers.image.vendor=${OWNER}" \
+            --annotation "index:org.opencontainers.image.authors=${{ github.actor }}" \
+            --annotation "index:maintainer=${{ github.actor }}" \
+            --annotation "index:build_version=DispatcharrBase version: ${BRANCH_TAG}-${TIMESTAMP}" \
+            --tag ghcr.io/${OWNER}/${REPO}:${BRANCH_TAG} \
             ghcr.io/${OWNER}/${REPO}:${BRANCH_TAG}-amd64 ghcr.io/${OWNER}/${REPO}:${BRANCH_TAG}-arm64
 
           # branch + timestamp tag
-          docker buildx imagetools create --tag ghcr.io/${OWNER}/${REPO}:${BRANCH_TAG}-${TIMESTAMP} \
+          docker buildx imagetools create \
+            --annotation "index:org.opencontainers.image.title=${{ needs.prepare.outputs.repo_name }}" \
+            --annotation "index:org.opencontainers.image.description=Your ultimate IPTV & stream Management companion." \
+            --annotation "index:org.opencontainers.image.url=https://github.com/${{ github.repository }}" \
+            --annotation "index:org.opencontainers.image.source=https://github.com/${{ github.repository }}" \
+            --annotation "index:org.opencontainers.image.version=${BRANCH_TAG}-${TIMESTAMP}" \
+            --annotation "index:org.opencontainers.image.created=${TIMESTAMP}" \
+            --annotation "index:org.opencontainers.image.revision=${{ github.sha }}" \
+            --annotation "index:org.opencontainers.image.licenses=See repository" \
+            --annotation "index:org.opencontainers.image.documentation=https://dispatcharr.github.io/Dispatcharr-Docs/" \
+            --annotation "index:org.opencontainers.image.vendor=${OWNER}" \
+            --annotation "index:org.opencontainers.image.authors=${{ github.actor }}" \
+            --annotation "index:maintainer=${{ github.actor }}" \
+            --annotation "index:build_version=DispatcharrBase version: ${BRANCH_TAG}-${TIMESTAMP}" \
+            --tag ghcr.io/${OWNER}/${REPO}:${BRANCH_TAG}-${TIMESTAMP} \
             ghcr.io/${OWNER}/${REPO}:${BRANCH_TAG}-${TIMESTAMP}-amd64 ghcr.io/${OWNER}/${REPO}:${BRANCH_TAG}-${TIMESTAMP}-arm64
 
           # Docker Hub manifests
           # branch tag (e.g. base or base-dev)
-          docker buildx imagetools create --tag docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${REPO}:${BRANCH_TAG} \
+          docker buildx imagetools create \
+            --annotation "index:org.opencontainers.image.title=${{ needs.prepare.outputs.repo_name }}" \
+            --annotation "index:org.opencontainers.image.description=Your ultimate IPTV & stream Management companion." \
+            --annotation "index:org.opencontainers.image.url=https://github.com/${{ github.repository }}" \
+            --annotation "index:org.opencontainers.image.source=https://github.com/${{ github.repository }}" \
+            --annotation "index:org.opencontainers.image.version=${BRANCH_TAG}-${TIMESTAMP}" \
+            --annotation "index:org.opencontainers.image.created=${TIMESTAMP}" \
+            --annotation "index:org.opencontainers.image.revision=${{ github.sha }}" \
+            --annotation "index:org.opencontainers.image.licenses=See repository" \
+            --annotation "index:org.opencontainers.image.documentation=https://dispatcharr.github.io/Dispatcharr-Docs/" \
+            --annotation "index:org.opencontainers.image.vendor=${OWNER}" \
+            --annotation "index:org.opencontainers.image.authors=${{ github.actor }}" \
+            --annotation "index:maintainer=${{ github.actor }}" \
+            --annotation "index:build_version=DispatcharrBase version: ${BRANCH_TAG}-${TIMESTAMP}" \
+            --tag docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${REPO}:${BRANCH_TAG} \
             docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${REPO}:${BRANCH_TAG}-amd64 docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${REPO}:${BRANCH_TAG}-arm64
 
           # branch + timestamp tag
-          docker buildx imagetools create --tag docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${REPO}:${BRANCH_TAG}-${TIMESTAMP} \
+          docker buildx imagetools create \
+            --annotation "index:org.opencontainers.image.title=${{ needs.prepare.outputs.repo_name }}" \
+            --annotation "index:org.opencontainers.image.description=Your ultimate IPTV & stream Management companion." \
+            --annotation "index:org.opencontainers.image.url=https://github.com/${{ github.repository }}" \
+            --annotation "index:org.opencontainers.image.source=https://github.com/${{ github.repository }}" \
+            --annotation "index:org.opencontainers.image.version=${BRANCH_TAG}-${TIMESTAMP}" \
+            --annotation "index:org.opencontainers.image.created=${TIMESTAMP}" \
+            --annotation "index:org.opencontainers.image.revision=${{ github.sha }}" \
+            --annotation "index:org.opencontainers.image.licenses=See repository" \
+            --annotation "index:org.opencontainers.image.documentation=https://dispatcharr.github.io/Dispatcharr-Docs/" \
+            --annotation "index:org.opencontainers.image.vendor=${OWNER}" \
+            --annotation "index:org.opencontainers.image.authors=${{ github.actor }}" \
+            --annotation "index:maintainer=${{ github.actor }}" \
+            --annotation "index:build_version=DispatcharrBase version: ${BRANCH_TAG}-${TIMESTAMP}" \
+            --tag docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${REPO}:${BRANCH_TAG}-${TIMESTAMP} \
             docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${REPO}:${BRANCH_TAG}-${TIMESTAMP}-amd64 docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${REPO}:${BRANCH_TAG}-${TIMESTAMP}-arm64
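
The same annotated-manifest pattern repeats in ci.yml and release.yml below, with only the tag names varying. As a quick sanity check after a workflow run, the index-level annotations can be read back from the registry. A minimal sketch in Python (assuming `docker buildx` is installed and the reference has been pushed; the example tag is hypothetical):

    import json
    import subprocess

    def read_index_annotations(image_ref: str) -> dict:
        """Fetch the raw OCI index for image_ref and return its annotations."""
        # `imagetools inspect --raw` prints the manifest list / OCI index JSON.
        raw = subprocess.run(
            ["docker", "buildx", "imagetools", "inspect", "--raw", image_ref],
            capture_output=True, text=True, check=True,
        ).stdout
        # Index-level annotations live in the top-level "annotations" map.
        return json.loads(raw).get("annotations", {})

    print(read_index_annotations("ghcr.io/dispatcharr/dispatcharr:latest"))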

87  .github/workflows/ci.yml (vendored)

@ -119,7 +119,27 @@ jobs:
           username: ${{ secrets.DOCKERHUB_USERNAME }}
           password: ${{ secrets.DOCKERHUB_TOKEN }}
 
+      # use metadata from the prepare job
+      - name: Extract metadata for Docker
+        id: meta
+        uses: docker/metadata-action@v5
+        with:
+          images: |
+            ghcr.io/${{ needs.prepare.outputs.repo_owner }}/${{ needs.prepare.outputs.repo_name }}
+            docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${{ needs.prepare.outputs.repo_name }}
+          labels: |
+            org.opencontainers.image.title=${{ needs.prepare.outputs.repo_name }}
+            org.opencontainers.image.description=Your ultimate IPTV & stream Management companion.
+            org.opencontainers.image.url=https://github.com/${{ github.repository }}
+            org.opencontainers.image.source=https://github.com/${{ github.repository }}
+            org.opencontainers.image.version=${{ needs.prepare.outputs.version }}-${{ needs.prepare.outputs.timestamp }}
+            org.opencontainers.image.created=${{ needs.prepare.outputs.timestamp }}
+            org.opencontainers.image.revision=${{ github.sha }}
+            org.opencontainers.image.licenses=See repository
+            org.opencontainers.image.documentation=https://dispatcharr.github.io/Dispatcharr-Docs/
+            org.opencontainers.image.vendor=${{ needs.prepare.outputs.repo_owner }}
+            org.opencontainers.image.authors=${{ github.actor }}
+            maintainer=${{ github.actor }}
+            build_version=Dispatcharr version: ${{ needs.prepare.outputs.version }}-${{ needs.prepare.outputs.timestamp }}
+
       - name: Build and push Docker image
         uses: docker/build-push-action@v4

@ -137,6 +157,7 @@ jobs:
             ghcr.io/${{ needs.prepare.outputs.repo_owner }}/${{ needs.prepare.outputs.repo_name }}:${{ needs.prepare.outputs.version }}-${{ needs.prepare.outputs.timestamp }}-${{ matrix.platform }}
             docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${{ needs.prepare.outputs.repo_name }}:${{ needs.prepare.outputs.branch_tag }}-${{ matrix.platform }}
             docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${{ needs.prepare.outputs.repo_name }}:${{ needs.prepare.outputs.version }}-${{ needs.prepare.outputs.timestamp }}-${{ matrix.platform }}
+          labels: ${{ steps.meta.outputs.labels }}
           build-args: |
             REPO_OWNER=${{ needs.prepare.outputs.repo_owner }}
             REPO_NAME=${{ needs.prepare.outputs.repo_name }}

@ -181,16 +202,72 @@ jobs:
           echo "Creating multi-arch manifest for ${OWNER}/${REPO}"
 
           # branch tag (e.g. latest or dev)
-          docker buildx imagetools create --tag ghcr.io/${OWNER}/${REPO}:${BRANCH_TAG} \
+          docker buildx imagetools create \
+            --annotation "index:org.opencontainers.image.title=${{ needs.prepare.outputs.repo_name }}" \
+            --annotation "index:org.opencontainers.image.description=Your ultimate IPTV & stream Management companion." \
+            --annotation "index:org.opencontainers.image.url=https://github.com/${{ github.repository }}" \
+            --annotation "index:org.opencontainers.image.source=https://github.com/${{ github.repository }}" \
+            --annotation "index:org.opencontainers.image.version=${BRANCH_TAG}" \
+            --annotation "index:org.opencontainers.image.created=${TIMESTAMP}" \
+            --annotation "index:org.opencontainers.image.revision=${{ github.sha }}" \
+            --annotation "index:org.opencontainers.image.licenses=See repository" \
+            --annotation "index:org.opencontainers.image.documentation=https://dispatcharr.github.io/Dispatcharr-Docs/" \
+            --annotation "index:org.opencontainers.image.vendor=${OWNER}" \
+            --annotation "index:org.opencontainers.image.authors=${{ github.actor }}" \
+            --annotation "index:maintainer=${{ github.actor }}" \
+            --annotation "index:build_version=Dispatcharr version: ${VERSION}-${TIMESTAMP}" \
+            --tag ghcr.io/${OWNER}/${REPO}:${BRANCH_TAG} \
             ghcr.io/${OWNER}/${REPO}:${BRANCH_TAG}-amd64 ghcr.io/${OWNER}/${REPO}:${BRANCH_TAG}-arm64
 
           # version + timestamp tag
-          docker buildx imagetools create --tag ghcr.io/${OWNER}/${REPO}:${VERSION}-${TIMESTAMP} \
+          docker buildx imagetools create \
+            --annotation "index:org.opencontainers.image.title=${{ needs.prepare.outputs.repo_name }}" \
+            --annotation "index:org.opencontainers.image.description=Your ultimate IPTV & stream Management companion." \
+            --annotation "index:org.opencontainers.image.url=https://github.com/${{ github.repository }}" \
+            --annotation "index:org.opencontainers.image.source=https://github.com/${{ github.repository }}" \
+            --annotation "index:org.opencontainers.image.version=${VERSION}-${TIMESTAMP}" \
+            --annotation "index:org.opencontainers.image.created=${TIMESTAMP}" \
+            --annotation "index:org.opencontainers.image.revision=${{ github.sha }}" \
+            --annotation "index:org.opencontainers.image.licenses=See repository" \
+            --annotation "index:org.opencontainers.image.documentation=https://dispatcharr.github.io/Dispatcharr-Docs/" \
+            --annotation "index:org.opencontainers.image.vendor=${OWNER}" \
+            --annotation "index:org.opencontainers.image.authors=${{ github.actor }}" \
+            --annotation "index:maintainer=${{ github.actor }}" \
+            --annotation "index:build_version=Dispatcharr version: ${VERSION}-${TIMESTAMP}" \
+            --tag ghcr.io/${OWNER}/${REPO}:${VERSION}-${TIMESTAMP} \
             ghcr.io/${OWNER}/${REPO}:${VERSION}-${TIMESTAMP}-amd64 ghcr.io/${OWNER}/${REPO}:${VERSION}-${TIMESTAMP}-arm64
 
           # also create Docker Hub manifests using the same username
-          docker buildx imagetools create --tag docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${REPO}:${BRANCH_TAG} \
+          docker buildx imagetools create \
+            --annotation "index:org.opencontainers.image.title=${{ needs.prepare.outputs.repo_name }}" \
+            --annotation "index:org.opencontainers.image.description=Your ultimate IPTV & stream Management companion." \
+            --annotation "index:org.opencontainers.image.url=https://github.com/${{ github.repository }}" \
+            --annotation "index:org.opencontainers.image.source=https://github.com/${{ github.repository }}" \
+            --annotation "index:org.opencontainers.image.version=${BRANCH_TAG}" \
+            --annotation "index:org.opencontainers.image.created=${TIMESTAMP}" \
+            --annotation "index:org.opencontainers.image.revision=${{ github.sha }}" \
+            --annotation "index:org.opencontainers.image.licenses=See repository" \
+            --annotation "index:org.opencontainers.image.documentation=https://dispatcharr.github.io/Dispatcharr-Docs/" \
+            --annotation "index:org.opencontainers.image.vendor=${OWNER}" \
+            --annotation "index:org.opencontainers.image.authors=${{ github.actor }}" \
+            --annotation "index:maintainer=${{ github.actor }}" \
+            --annotation "index:build_version=Dispatcharr version: ${VERSION}-${TIMESTAMP}" \
+            --tag docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${REPO}:${BRANCH_TAG} \
             docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${REPO}:${BRANCH_TAG}-amd64 docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${REPO}:${BRANCH_TAG}-arm64
 
-          docker buildx imagetools create --tag docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${REPO}:${VERSION}-${TIMESTAMP} \
+          docker buildx imagetools create \
+            --annotation "index:org.opencontainers.image.title=${{ needs.prepare.outputs.repo_name }}" \
+            --annotation "index:org.opencontainers.image.description=Your ultimate IPTV & stream Management companion." \
+            --annotation "index:org.opencontainers.image.url=https://github.com/${{ github.repository }}" \
+            --annotation "index:org.opencontainers.image.source=https://github.com/${{ github.repository }}" \
+            --annotation "index:org.opencontainers.image.version=${VERSION}-${TIMESTAMP}" \
+            --annotation "index:org.opencontainers.image.created=${TIMESTAMP}" \
+            --annotation "index:org.opencontainers.image.revision=${{ github.sha }}" \
+            --annotation "index:org.opencontainers.image.licenses=See repository" \
+            --annotation "index:org.opencontainers.image.documentation=https://dispatcharr.github.io/Dispatcharr-Docs/" \
+            --annotation "index:org.opencontainers.image.vendor=${OWNER}" \
+            --annotation "index:org.opencontainers.image.authors=${{ github.actor }}" \
+            --annotation "index:maintainer=${{ github.actor }}" \
+            --annotation "index:build_version=Dispatcharr version: ${VERSION}-${TIMESTAMP}" \
+            --tag docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${REPO}:${VERSION}-${TIMESTAMP} \
             docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${REPO}:${VERSION}-${TIMESTAMP}-amd64 docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${REPO}:${VERSION}-${TIMESTAMP}-arm64

95  .github/workflows/release.yml (vendored)

@ -25,6 +25,7 @@ jobs:
       new_version: ${{ steps.update_version.outputs.new_version }}
       repo_owner: ${{ steps.meta.outputs.repo_owner }}
       repo_name: ${{ steps.meta.outputs.repo_name }}
+      timestamp: ${{ steps.timestamp.outputs.timestamp }}
     steps:
       - uses: actions/checkout@v3
         with:

@ -56,6 +57,12 @@ jobs:
           REPO_NAME=$(echo "${{ github.repository }}" | cut -d '/' -f 2 | tr '[:upper:]' '[:lower:]')
           echo "repo_name=${REPO_NAME}" >> $GITHUB_OUTPUT
 
+      - name: Generate timestamp for build
+        id: timestamp
+        run: |
+          TIMESTAMP=$(date -u +'%Y%m%d%H%M%S')
+          echo "timestamp=${TIMESTAMP}" >> $GITHUB_OUTPUT
+
       - name: Commit and Tag
         run: |
           git add version.py CHANGELOG.md
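
For reference, the timestamp step's `date -u +'%Y%m%d%H%M%S'` produces a compact UTC stamp; an equivalent in Python, shown only to document the format:

    from datetime import datetime, timezone

    # Same format as the workflow's build timestamp, e.g. "20260123023514".
    timestamp = datetime.now(timezone.utc).strftime("%Y%m%d%H%M%S")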

@ -104,6 +111,28 @@ jobs:
           username: ${{ secrets.DOCKERHUB_USERNAME }}
           password: ${{ secrets.DOCKERHUB_TOKEN }}
 
+      - name: Extract metadata for Docker
+        id: meta
+        uses: docker/metadata-action@v5
+        with:
+          images: |
+            ghcr.io/${{ needs.prepare.outputs.repo_owner }}/${{ needs.prepare.outputs.repo_name }}
+            docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${{ needs.prepare.outputs.repo_name }}
+          labels: |
+            org.opencontainers.image.title=${{ needs.prepare.outputs.repo_name }}
+            org.opencontainers.image.description=Your ultimate IPTV & stream Management companion.
+            org.opencontainers.image.url=https://github.com/${{ github.repository }}
+            org.opencontainers.image.source=https://github.com/${{ github.repository }}
+            org.opencontainers.image.version=${{ needs.prepare.outputs.new_version }}
+            org.opencontainers.image.created=${{ needs.prepare.outputs.timestamp }}
+            org.opencontainers.image.revision=${{ github.sha }}
+            org.opencontainers.image.licenses=See repository
+            org.opencontainers.image.documentation=https://dispatcharr.github.io/Dispatcharr-Docs/
+            org.opencontainers.image.vendor=${{ needs.prepare.outputs.repo_owner }}
+            org.opencontainers.image.authors=${{ github.actor }}
+            maintainer=${{ github.actor }}
+            build_version=Dispatcharr version: ${{ needs.prepare.outputs.new_version }} Build date: ${{ needs.prepare.outputs.timestamp }}
+
       - name: Build and push Docker image
         uses: docker/build-push-action@v4
         with:

@ -115,6 +144,7 @@ jobs:
             ghcr.io/${{ needs.prepare.outputs.repo_owner }}/${{ needs.prepare.outputs.repo_name }}:${{ needs.prepare.outputs.new_version }}-${{ matrix.platform }}
             docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${{ needs.prepare.outputs.repo_name }}:latest-${{ matrix.platform }}
             docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${{ needs.prepare.outputs.repo_name }}:${{ needs.prepare.outputs.new_version }}-${{ matrix.platform }}
+          labels: ${{ steps.meta.outputs.labels }}
           build-args: |
             REPO_OWNER=${{ needs.prepare.outputs.repo_owner }}
             REPO_NAME=${{ needs.prepare.outputs.repo_name }}

@ -149,25 +179,82 @@ jobs:
           OWNER=${{ needs.prepare.outputs.repo_owner }}
           REPO=${{ needs.prepare.outputs.repo_name }}
           VERSION=${{ needs.prepare.outputs.new_version }}
+          TIMESTAMP=${{ needs.prepare.outputs.timestamp }}
 
           echo "Creating multi-arch manifest for ${OWNER}/${REPO}"
 
           # GitHub Container Registry manifests
           # latest tag
-          docker buildx imagetools create --tag ghcr.io/${OWNER}/${REPO}:latest \
+          docker buildx imagetools create \
+            --annotation "index:org.opencontainers.image.title=${{ needs.prepare.outputs.repo_name }}" \
+            --annotation "index:org.opencontainers.image.description=Your ultimate IPTV & stream Management companion." \
+            --annotation "index:org.opencontainers.image.url=https://github.com/${{ github.repository }}" \
+            --annotation "index:org.opencontainers.image.source=https://github.com/${{ github.repository }}" \
+            --annotation "index:org.opencontainers.image.version=latest" \
+            --annotation "index:org.opencontainers.image.created=${TIMESTAMP}" \
+            --annotation "index:org.opencontainers.image.revision=${{ github.sha }}" \
+            --annotation "index:org.opencontainers.image.licenses=See repository" \
+            --annotation "index:org.opencontainers.image.documentation=https://dispatcharr.github.io/Dispatcharr-Docs/" \
+            --annotation "index:org.opencontainers.image.vendor=${OWNER}" \
+            --annotation "index:org.opencontainers.image.authors=${{ github.actor }}" \
+            --annotation "index:maintainer=${{ github.actor }}" \
+            --annotation "index:build_version=Dispatcharr version: ${VERSION} Build date: ${TIMESTAMP}" \
+            --tag ghcr.io/${OWNER}/${REPO}:latest \
             ghcr.io/${OWNER}/${REPO}:latest-amd64 ghcr.io/${OWNER}/${REPO}:latest-arm64
 
           # version tag
-          docker buildx imagetools create --tag ghcr.io/${OWNER}/${REPO}:${VERSION} \
+          docker buildx imagetools create \
+            --annotation "index:org.opencontainers.image.title=${{ needs.prepare.outputs.repo_name }}" \
+            --annotation "index:org.opencontainers.image.description=Your ultimate IPTV & stream Management companion." \
+            --annotation "index:org.opencontainers.image.url=https://github.com/${{ github.repository }}" \
+            --annotation "index:org.opencontainers.image.source=https://github.com/${{ github.repository }}" \
+            --annotation "index:org.opencontainers.image.version=${VERSION}" \
+            --annotation "index:org.opencontainers.image.created=${TIMESTAMP}" \
+            --annotation "index:org.opencontainers.image.revision=${{ github.sha }}" \
+            --annotation "index:org.opencontainers.image.licenses=See repository" \
+            --annotation "index:org.opencontainers.image.documentation=https://dispatcharr.github.io/Dispatcharr-Docs/" \
+            --annotation "index:org.opencontainers.image.vendor=${OWNER}" \
+            --annotation "index:org.opencontainers.image.authors=${{ github.actor }}" \
+            --annotation "index:maintainer=${{ github.actor }}" \
+            --annotation "index:build_version=Dispatcharr version: ${VERSION} Build date: ${TIMESTAMP}" \
+            --tag ghcr.io/${OWNER}/${REPO}:${VERSION} \
             ghcr.io/${OWNER}/${REPO}:${VERSION}-amd64 ghcr.io/${OWNER}/${REPO}:${VERSION}-arm64
 
           # Docker Hub manifests
           # latest tag
-          docker buildx imagetools create --tag docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${REPO}:latest \
+          docker buildx imagetools create \
+            --annotation "index:org.opencontainers.image.title=${{ needs.prepare.outputs.repo_name }}" \
+            --annotation "index:org.opencontainers.image.description=Your ultimate IPTV & stream Management companion." \
+            --annotation "index:org.opencontainers.image.url=https://github.com/${{ github.repository }}" \
+            --annotation "index:org.opencontainers.image.source=https://github.com/${{ github.repository }}" \
+            --annotation "index:org.opencontainers.image.version=latest" \
+            --annotation "index:org.opencontainers.image.created=${TIMESTAMP}" \
+            --annotation "index:org.opencontainers.image.revision=${{ github.sha }}" \
+            --annotation "index:org.opencontainers.image.licenses=See repository" \
+            --annotation "index:org.opencontainers.image.documentation=https://dispatcharr.github.io/Dispatcharr-Docs/" \
+            --annotation "index:org.opencontainers.image.vendor=${OWNER}" \
+            --annotation "index:org.opencontainers.image.authors=${{ github.actor }}" \
+            --annotation "index:maintainer=${{ github.actor }}" \
+            --annotation "index:build_version=Dispatcharr version: ${VERSION} Build date: ${TIMESTAMP}" \
+            --tag docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${REPO}:latest \
             docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${REPO}:latest-amd64 docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${REPO}:latest-arm64
 
           # version tag
-          docker buildx imagetools create --tag docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${REPO}:${VERSION} \
+          docker buildx imagetools create \
+            --annotation "index:org.opencontainers.image.title=${{ needs.prepare.outputs.repo_name }}" \
+            --annotation "index:org.opencontainers.image.description=Your ultimate IPTV & stream Management companion." \
+            --annotation "index:org.opencontainers.image.url=https://github.com/${{ github.repository }}" \
+            --annotation "index:org.opencontainers.image.source=https://github.com/${{ github.repository }}" \
+            --annotation "index:org.opencontainers.image.version=${VERSION}" \
+            --annotation "index:org.opencontainers.image.created=${TIMESTAMP}" \
+            --annotation "index:org.opencontainers.image.revision=${{ github.sha }}" \
+            --annotation "index:org.opencontainers.image.licenses=See repository" \
+            --annotation "index:org.opencontainers.image.documentation=https://dispatcharr.github.io/Dispatcharr-Docs/" \
+            --annotation "index:org.opencontainers.image.vendor=${OWNER}" \
+            --annotation "index:org.opencontainers.image.authors=${{ github.actor }}" \
+            --annotation "index:maintainer=${{ github.actor }}" \
+            --annotation "index:build_version=Dispatcharr version: ${VERSION} Build date: ${TIMESTAMP}" \
+            --tag docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${REPO}:${VERSION} \
             docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${REPO}:${VERSION}-amd64 docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${REPO}:${VERSION}-arm64
 
   create-release:

31  CHANGELOG.md

@ -7,6 +7,37 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0

## [Unreleased]

### Added

- VOD client stop button in Stats page: Users can now disconnect individual VOD clients from the Stats view, similar to the existing channel client disconnect functionality.
- Automated configuration backup/restore system with scheduled backups, retention policies, and async task processing - Thanks [@stlalpha](https://github.com/stlalpha) (Closes #153)
- Stream group as available hash option: Users can now select 'Group' as a hash key option in Settings → Stream Settings → M3U Hash Key, allowing streams to be differentiated by their group membership in addition to name, URL, TVG-ID, and M3U ID

### Changed

- Initial super user creation page now matches the login page design with logo, welcome message, divider, and version display for a more consistent and polished first-time setup experience
- Removed unreachable code path in m3u output - Thanks [@DawtCom](https://github.com/DawtCom)
- GitHub Actions workflows now use `docker/metadata-action` for cleaner and more maintainable OCI-compliant image label generation across all build pipelines (ci.yml, base-image.yml, release.yml). Labels are applied to both platform-specific images and multi-arch manifests with proper annotation formatting. - Thanks [@mrdynamo](https://github.com/mrdynamo) (Closes #724)
- Updated docker/dev-build.sh to support private registries, multiple architectures, and pushing. Now you can do things like `dev-build.sh -p -r my.private.registry -a linux/arm64,linux/amd64` - Thanks [@jdblack](https://github.com/jdblack)
- Updated dependencies: Django (5.2.4 → 5.2.9) includes CVE security patch, psycopg2-binary (2.9.10 → 2.9.11), celery (5.5.3 → 5.6.0), djangorestframework (3.16.0 → 3.16.1), requests (2.32.4 → 2.32.5), psutil (7.0.0 → 7.1.3), gevent (25.5.1 → 25.9.1), rapidfuzz (3.13.0 → 3.14.3), torch (2.7.1 → 2.9.1), sentence-transformers (5.1.0 → 5.2.0), lxml (6.0.0 → 6.0.2) (Closes #662)
- Frontend dependencies updated: Vite (6.2.0 → 7.1.7), ESLint (9.21.0 → 9.27.0), and related packages; added npm `overrides` to enforce js-yaml@^4.1.1 for a transitive security fix. All 6 reported vulnerabilities resolved with `npm audit fix`.
- Floating video player now supports resizing via drag handles, with minimum size enforcement and viewport/page boundary constraints to keep it visible.
- Redis connection settings now fully configurable via environment variables (`REDIS_HOST`, `REDIS_PORT`, `REDIS_DB`, `REDIS_URL`), replacing hardcoded `localhost:6379` values throughout the codebase. This enables use of external Redis services in production deployments. (Closes #762) (sketched below)
- Celery broker and result backend URLs now respect `REDIS_HOST`/`REDIS_PORT`/`REDIS_DB` settings as defaults, with `CELERY_BROKER_URL` and `CELERY_RESULT_BACKEND` environment variables available for override.

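The two Redis/Celery entries above describe an environment-variable fallback chain. A sketch of that pattern (variable names come from the entries; the exact settings.py layout is an assumption):

    import os

    # Defaults preserve the old hardcoded localhost:6379 behaviour.
    REDIS_HOST = os.environ.get("REDIS_HOST", "localhost")
    REDIS_PORT = os.environ.get("REDIS_PORT", "6379")
    REDIS_DB = os.environ.get("REDIS_DB", "0")
    REDIS_URL = os.environ.get("REDIS_URL", f"redis://{REDIS_HOST}:{REDIS_PORT}/{REDIS_DB}")

    # Celery follows the Redis settings unless explicitly overridden.
    CELERY_BROKER_URL = os.environ.get("CELERY_BROKER_URL", REDIS_URL)
    CELERY_RESULT_BACKEND = os.environ.get("CELERY_RESULT_BACKEND", REDIS_URL)
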
### Fixed

- M3U Profile form now properly resets local state for search and replace patterns after saving, preventing validation errors when adding multiple profiles in a row
- DVR series rule deletion now properly handles TVG IDs that contain slashes by encoding them in the URL path (Fixes #697)
- VOD episode processing now correctly handles duplicate episodes (same episode in multiple languages/qualities) by reusing Episode records across multiple M3UEpisodeRelation entries instead of attempting to create duplicates (Fixes #556)
- XtreamCodes series streaming endpoint now correctly handles episodes with multiple streams (different languages/qualities) by selecting the best available stream based on account priority (Fixes #569)
- XtreamCodes series info API now returns unique episodes instead of duplicate entries when multiple streams exist for the same episode (different languages/qualities)
- nginx now gracefully handles hosts without IPv6 support by automatically disabling IPv6 binding at startup (Fixes #744)
- XtreamCodes EPG API now returns correct date/time format for start/end fields and proper string types for timestamps and channel_id
- XtreamCodes EPG API now handles None values for title and description fields to prevent AttributeError
- XtreamCodes EPG `id` field now provides unique identifiers per program listing instead of always returning "0" for better client EPG handling
- XtreamCodes EPG `epg_id` field now correctly returns the EPGData record ID (representing the EPG source/channel mapping) instead of a dummy value

## [0.14.0] - 2025-12-09

### Added

@ -27,6 +27,7 @@ urlpatterns = [
     path('core/', include(('core.api_urls', 'core'), namespace='core')),
     path('plugins/', include(('apps.plugins.api_urls', 'plugins'), namespace='plugins')),
     path('vod/', include(('apps.vod.api_urls', 'vod'), namespace='vod')),
+    path('backups/', include(('apps.backups.api_urls', 'backups'), namespace='backups')),
     # path('output/', include(('apps.output.api_urls', 'output'), namespace='output')),
     #path('player/', include(('apps.player.api_urls', 'player'), namespace='player')),
     #path('settings/', include(('apps.settings.api_urls', 'settings'), namespace='settings')),

0   apps/backups/__init__.py (new, empty file)
18  apps/backups/api_urls.py (new file)

@ -0,0 +1,18 @@
from django.urls import path

from . import api_views

app_name = "backups"

urlpatterns = [
    path("", api_views.list_backups, name="backup-list"),
    path("create/", api_views.create_backup, name="backup-create"),
    path("upload/", api_views.upload_backup, name="backup-upload"),
    path("schedule/", api_views.get_schedule, name="backup-schedule-get"),
    path("schedule/update/", api_views.update_schedule, name="backup-schedule-update"),
    path("status/<str:task_id>/", api_views.backup_status, name="backup-status"),
    path("<str:filename>/download-token/", api_views.get_download_token, name="backup-download-token"),
    path("<str:filename>/download/", api_views.download_backup, name="backup-download"),
    path("<str:filename>/delete/", api_views.delete_backup, name="backup-delete"),
    path("<str:filename>/restore/", api_views.restore_backup, name="backup-restore"),
]
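
Because the urlpatterns hunk above mounts this module at `path('backups/', ...)` with namespace `backups`, the route names resolve through that namespace. A small sketch, assuming a configured Django project:

    from django.urls import reverse

    # Resolves to ".../backups/status/<task_id>/"; the full prefix depends on
    # where the parent API urlconf itself is mounted.
    url = reverse("backups:backup-status", kwargs={"task_id": "abc123"})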

364 apps/backups/api_views.py (new file)

@ -0,0 +1,364 @@
import hashlib
import hmac
import logging
import os
from pathlib import Path

from celery.result import AsyncResult
from django.conf import settings
from django.http import HttpResponse, StreamingHttpResponse, Http404
from rest_framework import status
from rest_framework.decorators import api_view, permission_classes, parser_classes
from rest_framework.permissions import IsAdminUser, AllowAny
from rest_framework.parsers import MultiPartParser, FormParser
from rest_framework.response import Response

from . import services
from .tasks import create_backup_task, restore_backup_task
from .scheduler import get_schedule_settings, update_schedule_settings

logger = logging.getLogger(__name__)


def _generate_task_token(task_id: str) -> str:
    """Generate a signed token for task status access without auth."""
    secret = settings.SECRET_KEY.encode()
    return hmac.new(secret, task_id.encode(), hashlib.sha256).hexdigest()[:32]


def _verify_task_token(task_id: str, token: str) -> bool:
    """Verify a task token is valid."""
    expected = _generate_task_token(task_id)
    return hmac.compare_digest(expected, token)


@api_view(["GET"])
@permission_classes([IsAdminUser])
def list_backups(request):
    """List all available backup files."""
    try:
        backups = services.list_backups()
        return Response(backups, status=status.HTTP_200_OK)
    except Exception as e:
        return Response(
            {"detail": f"Failed to list backups: {str(e)}"},
            status=status.HTTP_500_INTERNAL_SERVER_ERROR,
        )


@api_view(["POST"])
@permission_classes([IsAdminUser])
def create_backup(request):
    """Create a new backup (async via Celery)."""
    try:
        task = create_backup_task.delay()
        return Response(
            {
                "detail": "Backup started",
                "task_id": task.id,
                "task_token": _generate_task_token(task.id),
            },
            status=status.HTTP_202_ACCEPTED,
        )
    except Exception as e:
        return Response(
            {"detail": f"Failed to start backup: {str(e)}"},
            status=status.HTTP_500_INTERNAL_SERVER_ERROR,
        )


@api_view(["GET"])
@permission_classes([AllowAny])
def backup_status(request, task_id):
    """Check the status of a backup/restore task.

    Requires either:
    - Valid admin authentication, OR
    - Valid task_token query parameter
    """
    # Check for token-based auth (for restore when session is invalidated)
    token = request.query_params.get("token")
    if token:
        if not _verify_task_token(task_id, token):
            return Response(
                {"detail": "Invalid task token"},
                status=status.HTTP_403_FORBIDDEN,
            )
    else:
        # Fall back to admin auth check
        if not request.user.is_authenticated or not request.user.is_staff:
            return Response(
                {"detail": "Authentication required"},
                status=status.HTTP_401_UNAUTHORIZED,
            )

    try:
        result = AsyncResult(task_id)

        if result.ready():
            task_result = result.get()
            if task_result.get("status") == "completed":
                return Response({
                    "state": "completed",
                    "result": task_result,
                })
            else:
                return Response({
                    "state": "failed",
                    "error": task_result.get("error", "Unknown error"),
                })
        elif result.failed():
            return Response({
                "state": "failed",
                "error": str(result.result),
            })
        else:
            return Response({
                "state": result.state.lower(),
            })
    except Exception as e:
        return Response(
            {"detail": f"Failed to get task status: {str(e)}"},
            status=status.HTTP_500_INTERNAL_SERVER_ERROR,
        )


@api_view(["GET"])
@permission_classes([IsAdminUser])
def get_download_token(request, filename):
    """Get a signed token for downloading a backup file."""
    try:
        # Security: prevent path traversal
        if ".." in filename or "/" in filename or "\\" in filename:
            raise Http404("Invalid filename")

        backup_dir = services.get_backup_dir()
        backup_file = backup_dir / filename

        if not backup_file.exists():
            raise Http404("Backup file not found")

        token = _generate_task_token(filename)
        return Response({"token": token})
    except Http404:
        raise
    except Exception as e:
        return Response(
            {"detail": f"Failed to generate token: {str(e)}"},
            status=status.HTTP_500_INTERNAL_SERVER_ERROR,
        )


@api_view(["GET"])
@permission_classes([AllowAny])
def download_backup(request, filename):
    """Download a backup file.

    Requires either:
    - Valid admin authentication, OR
    - Valid download_token query parameter
    """
    # Check for token-based auth (avoids CORS preflight issues)
    token = request.query_params.get("token")
    if token:
        if not _verify_task_token(filename, token):
            return Response(
                {"detail": "Invalid download token"},
                status=status.HTTP_403_FORBIDDEN,
            )
    else:
        # Fall back to admin auth check
        if not request.user.is_authenticated or not request.user.is_staff:
            return Response(
                {"detail": "Authentication required"},
                status=status.HTTP_401_UNAUTHORIZED,
            )

    try:
        # Security: prevent path traversal by checking for suspicious characters
        if ".." in filename or "/" in filename or "\\" in filename:
            raise Http404("Invalid filename")

        backup_dir = services.get_backup_dir()
        backup_file = (backup_dir / filename).resolve()

        # Security: ensure the resolved path is still within backup_dir
        if not str(backup_file).startswith(str(backup_dir.resolve())):
            raise Http404("Invalid filename")

        if not backup_file.exists() or not backup_file.is_file():
            raise Http404("Backup file not found")

        file_size = backup_file.stat().st_size

        # Use X-Accel-Redirect for nginx (AIO container) - nginx serves file directly
        # Fall back to streaming for non-nginx deployments
        use_nginx_accel = os.environ.get("USE_NGINX_ACCEL", "").lower() == "true"
        logger.info(f"[DOWNLOAD] File: {filename}, Size: {file_size}, USE_NGINX_ACCEL: {use_nginx_accel}")

        if use_nginx_accel:
            # X-Accel-Redirect: Django returns immediately, nginx serves file
            logger.info(f"[DOWNLOAD] Using X-Accel-Redirect: /protected-backups/{filename}")
            response = HttpResponse()
            response["X-Accel-Redirect"] = f"/protected-backups/{filename}"
            response["Content-Type"] = "application/zip"
            response["Content-Length"] = file_size
            response["Content-Disposition"] = f'attachment; filename="{filename}"'
            return response
        else:
            # Streaming fallback for non-nginx deployments
            logger.info("[DOWNLOAD] Using streaming fallback (no nginx)")

            def file_iterator(file_path, chunk_size=2 * 1024 * 1024):
                with open(file_path, "rb") as f:
                    while chunk := f.read(chunk_size):
                        yield chunk

            response = StreamingHttpResponse(
                file_iterator(backup_file),
                content_type="application/zip",
            )
            response["Content-Length"] = file_size
            response["Content-Disposition"] = f'attachment; filename="{filename}"'
            return response
    except Http404:
        raise
    except Exception as e:
        return Response(
            {"detail": f"Download failed: {str(e)}"},
            status=status.HTTP_500_INTERNAL_SERVER_ERROR,
        )


@api_view(["DELETE"])
@permission_classes([IsAdminUser])
def delete_backup(request, filename):
    """Delete a backup file."""
    try:
        # Security: prevent path traversal
        if ".." in filename or "/" in filename or "\\" in filename:
            raise Http404("Invalid filename")

        services.delete_backup(filename)
        return Response(
            {"detail": "Backup deleted successfully"},
            status=status.HTTP_204_NO_CONTENT,
        )
    except FileNotFoundError:
        raise Http404("Backup file not found")
    except Exception as e:
        return Response(
            {"detail": f"Delete failed: {str(e)}"},
            status=status.HTTP_500_INTERNAL_SERVER_ERROR,
        )


@api_view(["POST"])
@permission_classes([IsAdminUser])
@parser_classes([MultiPartParser, FormParser])
def upload_backup(request):
    """Upload a backup file for restoration."""
    uploaded = request.FILES.get("file")
    if not uploaded:
        return Response(
            {"detail": "No file uploaded"},
            status=status.HTTP_400_BAD_REQUEST,
        )

    try:
        backup_dir = services.get_backup_dir()
        filename = uploaded.name or "uploaded-backup.zip"

        # Ensure unique filename
        backup_file = backup_dir / filename
        counter = 1
        while backup_file.exists():
            name_parts = filename.rsplit(".", 1)
            if len(name_parts) == 2:
                backup_file = backup_dir / f"{name_parts[0]}-{counter}.{name_parts[1]}"
            else:
                backup_file = backup_dir / f"{filename}-{counter}"
            counter += 1

        # Save uploaded file
        with backup_file.open("wb") as f:
            for chunk in uploaded.chunks():
                f.write(chunk)

        return Response(
            {
                "detail": "Backup uploaded successfully",
                "filename": backup_file.name,
            },
            status=status.HTTP_201_CREATED,
        )
    except Exception as e:
        return Response(
            {"detail": f"Upload failed: {str(e)}"},
            status=status.HTTP_500_INTERNAL_SERVER_ERROR,
        )


@api_view(["POST"])
@permission_classes([IsAdminUser])
def restore_backup(request, filename):
    """Restore from a backup file (async via Celery). WARNING: This will flush the database!"""
    try:
        # Security: prevent path traversal
        if ".." in filename or "/" in filename or "\\" in filename:
            raise Http404("Invalid filename")

        backup_dir = services.get_backup_dir()
        backup_file = backup_dir / filename

        if not backup_file.exists():
            raise Http404("Backup file not found")

        task = restore_backup_task.delay(filename)
        return Response(
            {
                "detail": "Restore started",
                "task_id": task.id,
                "task_token": _generate_task_token(task.id),
            },
            status=status.HTTP_202_ACCEPTED,
        )
    except Http404:
        raise
    except Exception as e:
        return Response(
            {"detail": f"Failed to start restore: {str(e)}"},
            status=status.HTTP_500_INTERNAL_SERVER_ERROR,
        )


@api_view(["GET"])
@permission_classes([IsAdminUser])
def get_schedule(request):
    """Get backup schedule settings."""
    try:
        settings = get_schedule_settings()
        return Response(settings)
    except Exception as e:
        return Response(
            {"detail": f"Failed to get schedule: {str(e)}"},
            status=status.HTTP_500_INTERNAL_SERVER_ERROR,
        )


@api_view(["PUT"])
@permission_classes([IsAdminUser])
def update_schedule(request):
    """Update backup schedule settings."""
    try:
        settings = update_schedule_settings(request.data)
        return Response(settings)
    except ValueError as e:
        return Response(
            {"detail": str(e)},
            status=status.HTTP_400_BAD_REQUEST,
        )
    except Exception as e:
        return Response(
            {"detail": f"Failed to update schedule: {str(e)}"},
            status=status.HTTP_500_INTERNAL_SERVER_ERROR,
        )
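
Backups and restores run through Celery, so callers poll `backup_status` with the returned `task_id` and `task_token`; the token keeps polling working during a restore, when the admin session is invalidated along with the database. A client-side sketch (the base URL is hypothetical, and the third-party `requests` package is assumed):

    import time

    import requests

    BASE = "http://localhost:9191/api/backups"  # hypothetical host and prefix

    def run_backup(admin_session: requests.Session) -> dict:
        """Start a backup and poll until the Celery task settles."""
        started = admin_session.post(f"{BASE}/create/").json()
        task_id, token = started["task_id"], started["task_token"]
        while True:
            state = requests.get(
                f"{BASE}/status/{task_id}/", params={"token": token}
            ).json()
            if state["state"] in ("completed", "failed"):
                return state
            time.sleep(2)  # task still queued or running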

7   apps/backups/apps.py (new file)

@ -0,0 +1,7 @@
from django.apps import AppConfig


class BackupsConfig(AppConfig):
    default_auto_field = "django.db.models.BigAutoField"
    name = "apps.backups"
    verbose_name = "Backups"
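
Neither the app registration nor the storage root appears in this diff, so the following settings sketch is an assumption about what the project needs (`BACKUP_ROOT` is read by apps/backups/services.py below, and django_celery_beat by the scheduler):

    # settings.py (sketch; only BACKUP_ROOT and the two app entries are implied by the diff)
    from pathlib import Path

    BASE_DIR = Path(__file__).resolve().parent.parent

    INSTALLED_APPS = [
        # ... existing apps ...
        "django_celery_beat",  # provides PeriodicTask / CrontabSchedule
        "apps.backups",
    ]

    # Directory where backup archives are written and served from.
    BACKUP_ROOT = BASE_DIR / "data" / "backups"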

0   apps/backups/migrations/__init__.py (new, empty file)
0   apps/backups/models.py (new, empty file)
203 apps/backups/scheduler.py (new file)

@ -0,0 +1,203 @@
import json
import logging

from django_celery_beat.models import PeriodicTask, CrontabSchedule

from core.models import CoreSettings

logger = logging.getLogger(__name__)

BACKUP_SCHEDULE_TASK_NAME = "backup-scheduled-task"

SETTING_KEYS = {
    "enabled": "backup_schedule_enabled",
    "frequency": "backup_schedule_frequency",
    "time": "backup_schedule_time",
    "day_of_week": "backup_schedule_day_of_week",
    "retention_count": "backup_retention_count",
    "cron_expression": "backup_schedule_cron_expression",
}

DEFAULTS = {
    "enabled": True,
    "frequency": "daily",
    "time": "03:00",
    "day_of_week": 0,  # Sunday
    "retention_count": 3,
    "cron_expression": "",
}


def _get_setting(key: str, default=None):
    """Get a backup setting from CoreSettings."""
    try:
        setting = CoreSettings.objects.get(key=SETTING_KEYS[key])
        value = setting.value
        if key == "enabled":
            return value.lower() == "true"
        elif key in ("day_of_week", "retention_count"):
            return int(value)
        return value
    except CoreSettings.DoesNotExist:
        return default if default is not None else DEFAULTS.get(key)


def _set_setting(key: str, value) -> None:
    """Set a backup setting in CoreSettings."""
    str_value = str(value).lower() if isinstance(value, bool) else str(value)
    CoreSettings.objects.update_or_create(
        key=SETTING_KEYS[key],
        defaults={
            "name": f"Backup {key.replace('_', ' ').title()}",
            "value": str_value,
        },
    )


def get_schedule_settings() -> dict:
    """Get all backup schedule settings."""
    return {
        "enabled": _get_setting("enabled"),
        "frequency": _get_setting("frequency"),
        "time": _get_setting("time"),
        "day_of_week": _get_setting("day_of_week"),
        "retention_count": _get_setting("retention_count"),
        "cron_expression": _get_setting("cron_expression"),
    }


def update_schedule_settings(data: dict) -> dict:
    """Update backup schedule settings and sync the PeriodicTask."""
    # Validate
    if "frequency" in data and data["frequency"] not in ("daily", "weekly"):
        raise ValueError("frequency must be 'daily' or 'weekly'")

    if "time" in data:
        try:
            hour, minute = data["time"].split(":")
            int(hour)
            int(minute)
        except (ValueError, AttributeError):
            raise ValueError("time must be in HH:MM format")

    if "day_of_week" in data:
        day = int(data["day_of_week"])
        if day < 0 or day > 6:
            raise ValueError("day_of_week must be 0-6 (Sunday-Saturday)")

    if "retention_count" in data:
        count = int(data["retention_count"])
        if count < 0:
            raise ValueError("retention_count must be >= 0")

    # Update settings
    for key in ("enabled", "frequency", "time", "day_of_week", "retention_count", "cron_expression"):
        if key in data:
            _set_setting(key, data[key])

    # Sync the periodic task
    _sync_periodic_task()

    return get_schedule_settings()


def _sync_periodic_task() -> None:
    """Create, update, or delete the scheduled backup task based on settings."""
    settings = get_schedule_settings()

    if not settings["enabled"]:
        # Delete the task if it exists
        task = PeriodicTask.objects.filter(name=BACKUP_SCHEDULE_TASK_NAME).first()
        if task:
            old_crontab = task.crontab
            task.delete()
            _cleanup_orphaned_crontab(old_crontab)
            logger.info("Backup schedule disabled, removed periodic task")
        return

    # Get old crontab before creating new one
    old_crontab = None
    try:
        old_task = PeriodicTask.objects.get(name=BACKUP_SCHEDULE_TASK_NAME)
        old_crontab = old_task.crontab
    except PeriodicTask.DoesNotExist:
        pass

    # Check if using cron expression (advanced mode)
    if settings["cron_expression"]:
        # Parse cron expression: "minute hour day month weekday"
        try:
            parts = settings["cron_expression"].split()
            if len(parts) != 5:
                raise ValueError("Cron expression must have 5 parts: minute hour day month weekday")

            minute, hour, day_of_month, month_of_year, day_of_week = parts

            crontab, _ = CrontabSchedule.objects.get_or_create(
                minute=minute,
                hour=hour,
                day_of_week=day_of_week,
                day_of_month=day_of_month,
                month_of_year=month_of_year,
                timezone=CoreSettings.get_system_time_zone(),
            )
        except Exception as e:
            logger.error(f"Invalid cron expression '{settings['cron_expression']}': {e}")
            raise ValueError(f"Invalid cron expression: {e}")
    else:
        # Use simple frequency-based scheduling
        # Parse time
        hour, minute = settings["time"].split(":")

        # Build crontab based on frequency
        system_tz = CoreSettings.get_system_time_zone()
        if settings["frequency"] == "daily":
            crontab, _ = CrontabSchedule.objects.get_or_create(
                minute=minute,
                hour=hour,
                day_of_week="*",
                day_of_month="*",
                month_of_year="*",
                timezone=system_tz,
            )
        else:  # weekly
            crontab, _ = CrontabSchedule.objects.get_or_create(
                minute=minute,
                hour=hour,
                day_of_week=str(settings["day_of_week"]),
                day_of_month="*",
                month_of_year="*",
                timezone=system_tz,
            )

    # Create or update the periodic task
    task, created = PeriodicTask.objects.update_or_create(
        name=BACKUP_SCHEDULE_TASK_NAME,
        defaults={
            "task": "apps.backups.tasks.scheduled_backup_task",
            "crontab": crontab,
            "enabled": True,
            "kwargs": json.dumps({"retention_count": settings["retention_count"]}),
        },
    )

    # Clean up old crontab if it changed and is orphaned
    if old_crontab and old_crontab.id != crontab.id:
        _cleanup_orphaned_crontab(old_crontab)

    action = "Created" if created else "Updated"
    logger.info(f"{action} backup schedule: {settings['frequency']} at {settings['time']}")


def _cleanup_orphaned_crontab(crontab_schedule):
    """Delete old CrontabSchedule if no other tasks are using it."""
    if crontab_schedule is None:
        return

    # Check if any other tasks are using this crontab
    if PeriodicTask.objects.filter(crontab=crontab_schedule).exists():
        logger.debug(f"CrontabSchedule {crontab_schedule.id} still in use, not deleting")
        return

    logger.debug(f"Cleaning up orphaned CrontabSchedule: {crontab_schedule.id}")
    crontab_schedule.delete()
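
To illustrate the two scheduling modes `update_schedule_settings` accepts (values are examples, not defaults from the diff):

    from apps.backups.scheduler import update_schedule_settings

    # Simple mode: weekly backup on Sunday (day_of_week=0) at 03:30, keep 5 archives.
    update_schedule_settings({
        "enabled": True,
        "frequency": "weekly",
        "day_of_week": 0,
        "time": "03:30",
        "retention_count": 5,
    })

    # Advanced mode: a non-empty cron_expression overrides frequency/time.
    # Format "minute hour day month weekday" — here 02:15 on the 1st of each month.
    update_schedule_settings({"enabled": True, "cron_expression": "15 2 1 * *"})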

320 apps/backups/services.py (new file)
@ -0,0 +1,320 @@
|
|||
import datetime
|
||||
import json
|
||||
import os
|
||||
import shutil
|
||||
import subprocess
|
||||
import tempfile
|
||||
from pathlib import Path
|
||||
from zipfile import ZipFile, ZIP_DEFLATED
|
||||
import logging
|
||||
import pytz
|
||||
|
||||
from django.conf import settings
|
||||
from core.models import CoreSettings
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def get_backup_dir() -> Path:
|
||||
"""Get the backup directory, creating it if necessary."""
|
||||
backup_dir = Path(settings.BACKUP_ROOT)
|
||||
backup_dir.mkdir(parents=True, exist_ok=True)
|
||||
return backup_dir
|
||||
|
||||
|
||||
def _is_postgresql() -> bool:
|
||||
"""Check if we're using PostgreSQL."""
|
||||
return settings.DATABASES["default"]["ENGINE"] == "django.db.backends.postgresql"
|
||||
|
||||
|
||||
def _get_pg_env() -> dict:
|
||||
"""Get environment variables for PostgreSQL commands."""
|
||||
db_config = settings.DATABASES["default"]
|
||||
env = os.environ.copy()
|
||||
env["PGPASSWORD"] = db_config.get("PASSWORD", "")
|
||||
return env
|
||||
|
||||
|
||||
def _get_pg_args() -> list[str]:
|
||||
"""Get common PostgreSQL command arguments."""
|
||||
db_config = settings.DATABASES["default"]
|
||||
return [
|
||||
"-h", db_config.get("HOST", "localhost"),
|
||||
"-p", str(db_config.get("PORT", 5432)),
|
||||
"-U", db_config.get("USER", "postgres"),
|
||||
"-d", db_config.get("NAME", "dispatcharr"),
|
||||
]
|
||||
|
||||
|
||||
def _dump_postgresql(output_file: Path) -> None:
    """Dump PostgreSQL database using pg_dump."""
    logger.info("Dumping PostgreSQL database with pg_dump...")

    cmd = [
        "pg_dump",
        *_get_pg_args(),
        "-Fc",  # Custom format for pg_restore
        "-v",   # Verbose
        "-f", str(output_file),
    ]

    result = subprocess.run(
        cmd,
        env=_get_pg_env(),
        capture_output=True,
        text=True,
    )

    if result.returncode != 0:
        logger.error(f"pg_dump failed: {result.stderr}")
        raise RuntimeError(f"pg_dump failed: {result.stderr}")

    logger.debug(f"pg_dump output: {result.stderr}")


def _restore_postgresql(dump_file: Path) -> None:
    """Restore PostgreSQL database using pg_restore."""
    logger.info("[PG_RESTORE] Starting pg_restore...")
    logger.info(f"[PG_RESTORE] Dump file: {dump_file}")

    pg_args = _get_pg_args()
    logger.info(f"[PG_RESTORE] Connection args: {pg_args}")

    cmd = [
        "pg_restore",
        "--clean",  # Clean (drop) database objects before recreating
        *pg_args,
        "-v",  # Verbose
        str(dump_file),
    ]

    logger.info(f"[PG_RESTORE] Running command: {' '.join(cmd)}")

    result = subprocess.run(
        cmd,
        env=_get_pg_env(),
        capture_output=True,
        text=True,
    )

    logger.info(f"[PG_RESTORE] Return code: {result.returncode}")

    # pg_restore may return non-zero even on partial success.
    # Check for actual errors vs. warnings.
    if result.returncode != 0:
        # Some errors during restore are expected (e.g., "does not exist" when cleaning).
        # Only fail on critical errors.
        stderr = result.stderr.lower()
        if "fatal" in stderr or "could not connect" in stderr:
            logger.error(f"[PG_RESTORE] Failed critically: {result.stderr}")
            raise RuntimeError(f"pg_restore failed: {result.stderr}")
        else:
            logger.warning(f"[PG_RESTORE] Completed with warnings: {result.stderr[:500]}...")

    logger.info("[PG_RESTORE] Completed successfully")
def _dump_sqlite(output_file: Path) -> None:
    """Dump SQLite database using the sqlite3 .backup command."""
    logger.info("Dumping SQLite database with sqlite3 .backup...")
    db_path = Path(settings.DATABASES["default"]["NAME"])

    if not db_path.exists():
        raise FileNotFoundError(f"SQLite database not found: {db_path}")

    # Use the sqlite3 .backup command via stdin for reliable execution
    result = subprocess.run(
        ["sqlite3", str(db_path)],
        input=f".backup '{output_file}'\n",
        capture_output=True,
        text=True,
    )

    if result.returncode != 0:
        logger.error(f"sqlite3 backup failed: {result.stderr}")
        raise RuntimeError(f"sqlite3 backup failed: {result.stderr}")

    # Verify the backup file was created
    if not output_file.exists():
        raise RuntimeError("sqlite3 backup failed: output file not created")

    logger.info(f"sqlite3 backup completed successfully: {output_file}")
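As an aside, shelling out to the sqlite3 CLI requires the binary to be present in the container. Python's built-in sqlite3 module exposes the same online-backup API, so an equivalent dump could be done in-process; this is only a sketch of the alternative, not what the code above does:

    import sqlite3

    def backup_sqlite_inprocess(db_path: str, output_path: str) -> None:
        # Uses SQLite's online backup API; safe while the database is in use.
        with sqlite3.connect(db_path) as src, sqlite3.connect(output_path) as dst:
            src.backup(dst)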
def _restore_sqlite(dump_file: Path) -> None:
    """Restore SQLite database by replacing the database file."""
    logger.info("Restoring SQLite database...")
    db_path = Path(settings.DATABASES["default"]["NAME"])
    backup_current = None

    # Back up the current database before overwriting
    if db_path.exists():
        backup_current = db_path.with_suffix(".db.bak")
        shutil.copy2(db_path, backup_current)
        logger.info(f"Backed up current database to {backup_current}")

    # Ensure the parent directory exists
    db_path.parent.mkdir(parents=True, exist_ok=True)

    # The backup file from _dump_sqlite is a complete SQLite database file,
    # so we can simply copy it over the existing database
    shutil.copy2(dump_file, db_path)

    # Verify the restore worked by checking that sqlite3 can read it
    result = subprocess.run(
        ["sqlite3", str(db_path)],
        input=".tables\n",
        capture_output=True,
        text=True,
    )

    if result.returncode != 0:
        logger.error(f"sqlite3 verification failed: {result.stderr}")
        # Try to restore from the backup
        if backup_current and backup_current.exists():
            shutil.copy2(backup_current, db_path)
            logger.info("Restored original database from backup")
        raise RuntimeError(f"sqlite3 restore verification failed: {result.stderr}")

    logger.info("sqlite3 restore completed successfully")
def create_backup() -> Path:
    """
    Create a backup archive containing a database dump and data directories.
    Returns the path to the created backup file.
    """
    backup_dir = get_backup_dir()

    # Use the system timezone for the filename (user-friendly), but keep internal timestamps in UTC
    system_tz_name = CoreSettings.get_system_time_zone()
    try:
        system_tz = pytz.timezone(system_tz_name)
        now_local = datetime.datetime.now(datetime.UTC).astimezone(system_tz)
        timestamp = now_local.strftime("%Y.%m.%d.%H.%M.%S")
    except Exception as e:
        logger.warning(f"Failed to use system timezone {system_tz_name}: {e}, falling back to UTC")
        timestamp = datetime.datetime.now(datetime.UTC).strftime("%Y.%m.%d.%H.%M.%S")

    backup_name = f"dispatcharr-backup-{timestamp}.zip"
    backup_file = backup_dir / backup_name

    logger.info(f"Creating backup: {backup_name}")

    with tempfile.TemporaryDirectory(prefix="dispatcharr-backup-") as temp_dir:
        temp_path = Path(temp_dir)

        # Determine the database type and dump accordingly
        if _is_postgresql():
            db_dump_file = temp_path / "database.dump"
            _dump_postgresql(db_dump_file)
            db_type = "postgresql"
        else:
            db_dump_file = temp_path / "database.sqlite3"
            _dump_sqlite(db_dump_file)
            db_type = "sqlite"

        # Create a ZIP archive with compression and ZIP64 support for large files
        with ZipFile(backup_file, "w", compression=ZIP_DEFLATED, allowZip64=True) as zip_file:
            # Add the database dump
            zip_file.write(db_dump_file, db_dump_file.name)

            # Add metadata
            metadata = {
                "format": "dispatcharr-backup",
                "version": 2,
                "database_type": db_type,
                "database_file": db_dump_file.name,
                "created_at": datetime.datetime.now(datetime.UTC).isoformat(),
            }
            zip_file.writestr("metadata.json", json.dumps(metadata, indent=2))

    logger.info(f"Backup created successfully: {backup_file}")
    return backup_file
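A quick way to sanity-check an archive produced by create_backup is to list its members and read metadata.json back; a minimal sketch (the filename is hypothetical):

    import json
    from zipfile import ZipFile

    with ZipFile("dispatcharr-backup-2025.01.01.03.00.00.zip") as zf:  # hypothetical filename
        print(zf.namelist())  # e.g. ['database.dump', 'metadata.json']
        metadata = json.loads(zf.read("metadata.json"))
        print(metadata["database_type"], metadata["created_at"])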
def restore_backup(backup_file: Path) -> None:
    """
    Restore from a backup archive.
    WARNING: This will overwrite the database!
    """
    if not backup_file.exists():
        raise FileNotFoundError(f"Backup file not found: {backup_file}")

    logger.info(f"Restoring from backup: {backup_file}")

    with tempfile.TemporaryDirectory(prefix="dispatcharr-restore-") as temp_dir:
        temp_path = Path(temp_dir)

        # Extract the backup
        logger.debug("Extracting backup archive...")
        with ZipFile(backup_file, "r") as zip_file:
            zip_file.extractall(temp_path)

        # Read the metadata
        metadata_file = temp_path / "metadata.json"
        if not metadata_file.exists():
            raise ValueError("Invalid backup: missing metadata.json")

        with open(metadata_file) as f:
            metadata = json.load(f)

        # Restore the database
        _restore_database(temp_path, metadata)

    logger.info("Restore completed successfully")
def _restore_database(temp_path: Path, metadata: dict) -> None:
    """Restore the database from a backup."""
    db_type = metadata.get("database_type", "postgresql")
    db_file = metadata.get("database_file", "database.dump")
    dump_file = temp_path / db_file

    if not dump_file.exists():
        raise ValueError(f"Invalid backup: missing {db_file}")

    current_db_type = "postgresql" if _is_postgresql() else "sqlite"

    if db_type != current_db_type:
        raise ValueError(
            f"Database type mismatch: backup is {db_type}, "
            f"but current database is {current_db_type}"
        )

    if db_type == "postgresql":
        _restore_postgresql(dump_file)
    else:
        _restore_sqlite(dump_file)
def list_backups() -> list[dict]:
    """List all available backup files with metadata."""
    backup_dir = get_backup_dir()
    backups = []

    for backup_file in sorted(backup_dir.glob("dispatcharr-backup-*.zip"), reverse=True):
        # Use the UTC timezone so the frontend can convert to the user's local time
        created_time = datetime.datetime.fromtimestamp(backup_file.stat().st_mtime, datetime.UTC)
        backups.append({
            "name": backup_file.name,
            "size": backup_file.stat().st_size,
            "created": created_time.isoformat(),
        })

    return backups
def delete_backup(filename: str) -> None:
    """Delete a backup file."""
    backup_dir = get_backup_dir()
    backup_file = backup_dir / filename

    if not backup_file.exists():
        raise FileNotFoundError(f"Backup file not found: {filename}")

    if not backup_file.is_file():
        raise ValueError(f"Invalid backup file: {filename}")

    backup_file.unlink()
    logger.info(f"Deleted backup: {filename}")
106  apps/backups/tasks.py  Normal file
@@ -0,0 +1,106 @@
import logging
import traceback
from celery import shared_task

from . import services

logger = logging.getLogger(__name__)


def _cleanup_old_backups(retention_count: int) -> int:
    """Delete old backups, keeping only the most recent N. Returns the count deleted."""
    if retention_count <= 0:
        return 0

    backups = services.list_backups()
    if len(backups) <= retention_count:
        return 0

    # Backups are sorted newest first, so delete from the end
    to_delete = backups[retention_count:]
    deleted = 0

    for backup in to_delete:
        try:
            services.delete_backup(backup["name"])
            deleted += 1
            logger.info(f"[CLEANUP] Deleted old backup: {backup['name']}")
        except Exception as e:
            logger.error(f"[CLEANUP] Failed to delete {backup['name']}: {e}")

    return deleted
@shared_task(bind=True)
def create_backup_task(self):
    """Celery task to create a backup asynchronously."""
    try:
        logger.info(f"[BACKUP] Starting backup task {self.request.id}")
        backup_file = services.create_backup()
        logger.info(f"[BACKUP] Task {self.request.id} completed: {backup_file.name}")
        return {
            "status": "completed",
            "filename": backup_file.name,
            "size": backup_file.stat().st_size,
        }
    except Exception as e:
        logger.error(f"[BACKUP] Task {self.request.id} failed: {str(e)}")
        logger.error(f"[BACKUP] Traceback: {traceback.format_exc()}")
        return {
            "status": "failed",
            "error": str(e),
        }


@shared_task(bind=True)
def restore_backup_task(self, filename: str):
    """Celery task to restore a backup asynchronously."""
    try:
        logger.info(f"[RESTORE] Starting restore task {self.request.id} for {filename}")
        backup_dir = services.get_backup_dir()
        backup_file = backup_dir / filename
        logger.info(f"[RESTORE] Backup file path: {backup_file}")
        services.restore_backup(backup_file)
        logger.info(f"[RESTORE] Task {self.request.id} completed successfully")
        return {
            "status": "completed",
            "filename": filename,
        }
    except Exception as e:
        logger.error(f"[RESTORE] Task {self.request.id} failed: {str(e)}")
        logger.error(f"[RESTORE] Traceback: {traceback.format_exc()}")
        return {
            "status": "failed",
            "error": str(e),
        }


@shared_task(bind=True)
def scheduled_backup_task(self, retention_count: int = 0):
    """Celery task for scheduled backups with optional retention cleanup."""
    try:
        logger.info(f"[SCHEDULED] Starting scheduled backup task {self.request.id}")

        # Create the backup
        backup_file = services.create_backup()
        logger.info(f"[SCHEDULED] Backup created: {backup_file.name}")

        # Clean up old backups if retention is set
        deleted = 0
        if retention_count > 0:
            deleted = _cleanup_old_backups(retention_count)
            logger.info(f"[SCHEDULED] Cleanup complete, deleted {deleted} old backup(s)")

        return {
            "status": "completed",
            "filename": backup_file.name,
            "size": backup_file.stat().st_size,
            "deleted_count": deleted,
        }
    except Exception as e:
        logger.error(f"[SCHEDULED] Task {self.request.id} failed: {str(e)}")
        logger.error(f"[SCHEDULED] Traceback: {traceback.format_exc()}")
        return {
            "status": "failed",
            "error": str(e),
        }
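Assuming a running Celery worker and broker, the tasks above can be exercised directly; a minimal sketch:

    from apps.backups.tasks import create_backup_task, scheduled_backup_task

    # Fire-and-forget backup; .get() blocks until the worker returns the dict payload.
    result = create_backup_task.delay()
    print(result.get(timeout=600))  # {'status': 'completed', 'filename': ..., 'size': ...}

    # Scheduled variant with retention: keep only the five newest backups.
    scheduled_backup_task.delay(retention_count=5)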
1163  apps/backups/tests.py  Normal file
File diff suppressed because it is too large
@@ -47,7 +47,7 @@ urlpatterns = [
     path('series-rules/', SeriesRulesAPIView.as_view(), name='series_rules'),
     path('series-rules/evaluate/', EvaluateSeriesRulesAPIView.as_view(), name='evaluate_series_rules'),
     path('series-rules/bulk-remove/', BulkRemoveSeriesRecordingsAPIView.as_view(), name='bulk_remove_series_recordings'),
-    path('series-rules/<str:tvg_id>/', DeleteSeriesRuleAPIView.as_view(), name='delete_series_rule'),
+    path('series-rules/<path:tvg_id>/', DeleteSeriesRuleAPIView.as_view(), name='delete_series_rule'),
     path('recordings/bulk-delete-upcoming/', BulkDeleteUpcomingRecordingsAPIView.as_view(), name='bulk_delete_upcoming_recordings'),
     path('dvr/comskip-config/', ComskipConfigAPIView.as_view(), name='comskip_config'),
 ]
@@ -8,7 +8,9 @@ from drf_yasg.utils import swagger_auto_schema
 from drf_yasg import openapi
 from django.shortcuts import get_object_or_404, get_list_or_404
 from django.db import transaction
+from django.db.models import Q
 import os, json, requests, logging
+from urllib.parse import unquote
 from apps.accounts.permissions import (
     Authenticated,
     IsAdmin,
@@ -419,10 +421,36 @@ class ChannelViewSet(viewsets.ModelViewSet):
             group_names = channel_group.split(",")
             qs = qs.filter(channel_group__name__in=group_names)

-        if self.request.user.user_level < 10:
-            qs = qs.filter(user_level__lte=self.request.user.user_level)
-
-        return qs
+        filters = {}
+        q_filters = Q()
+
+        channel_profile_id = self.request.query_params.get("channel_profile_id")
+        show_disabled_param = self.request.query_params.get("show_disabled", None)
+        only_streamless = self.request.query_params.get("only_streamless", None)
+
+        if channel_profile_id:
+            try:
+                profile_id_int = int(channel_profile_id)
+                filters["channelprofilemembership__channel_profile_id"] = profile_id_int
+
+                if show_disabled_param is None:
+                    filters["channelprofilemembership__enabled"] = True
+            except (ValueError, TypeError):
+                # Ignore invalid profile id values
+                pass
+
+        if only_streamless:
+            q_filters &= Q(streams__isnull=True)
+
+        if self.request.user.user_level < 10:
+            filters["user_level__lte"] = self.request.user.user_level
+
+        if filters:
+            qs = qs.filter(**filters)
+        if q_filters:
+            qs = qs.filter(q_filters)
+
+        return qs.distinct()

     def get_serializer_context(self):
         context = super().get_serializer_context()
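The new query parameters can be combined on a single request; a hedged client sketch (the host and endpoint path are assumptions, not taken from this diff):

    import requests

    resp = requests.get(
        "http://dispatcharr.local/api/channels/channels/",  # hypothetical URL
        params={"channel_profile_id": 3, "only_streamless": "true"},
        headers={"Authorization": "Bearer <token>"},
    )
    channels = resp.json()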
@@ -2026,7 +2054,7 @@ class DeleteSeriesRuleAPIView(APIView):
         return [Authenticated()]

     def delete(self, request, tvg_id):
-        tvg_id = str(tvg_id)
+        tvg_id = unquote(str(tvg_id or ""))
         rules = [r for r in CoreSettings.get_dvr_series_rules() if str(r.get("tvg_id")) != tvg_id]
         CoreSettings.set_dvr_series_rules(rules)
         return Response({"success": True, "rules": rules})
@@ -119,11 +119,11 @@ class Stream(models.Model):
         return self.name or self.url or f"Stream ID {self.id}"

     @classmethod
-    def generate_hash_key(cls, name, url, tvg_id, keys=None, m3u_id=None):
+    def generate_hash_key(cls, name, url, tvg_id, keys=None, m3u_id=None, group=None):
         if keys is None:
             keys = CoreSettings.get_m3u_hash_key().split(",")

-        stream_parts = {"name": name, "url": url, "tvg_id": tvg_id, "m3u_id": m3u_id}
+        stream_parts = {"name": name, "url": url, "tvg_id": tvg_id, "m3u_id": m3u_id, "group": group}

         hash_parts = {key: stream_parts[key] for key in keys if key in stream_parts}
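With "group" added to stream_parts, the same stream listed under two different group titles can now hash differently once "group" is part of the configured hash key. A sketch, assuming the hash covers exactly the selected parts (the keys list here is a hypothetical configuration):

    keys = ["name", "url", "group"]  # hypothetical hash-key configuration
    h_news = Stream.generate_hash_key("CNN", "http://u/1", "cnn.us", keys, group="News")
    h_usa = Stream.generate_hash_key("CNN", "http://u/1", "cnn.us", keys, group="USA")
    assert h_news != h_usa  # distinct groups now produce distinct stream hashes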
@@ -792,7 +792,7 @@ def process_xc_category_direct(account_id, batch, groups, hash_keys):
             group_title = group_name

         stream_hash = Stream.generate_hash_key(
-            name, url, tvg_id, hash_keys, m3u_id=account_id
+            name, url, tvg_id, hash_keys, m3u_id=account_id, group=group_title
         )
         stream_props = {
             "name": name,
@@ -966,7 +966,7 @@ def process_m3u_batch_direct(account_id, batch, groups, hash_keys):
                 )
                 continue

-            stream_hash = Stream.generate_hash_key(name, url, tvg_id, hash_keys, m3u_id=account_id)
+            stream_hash = Stream.generate_hash_key(name, url, tvg_id, hash_keys, m3u_id=account_id, group=group_title)
             stream_props = {
                 "name": name,
                 "url": url,
@@ -161,18 +161,7 @@ def generate_m3u(request, profile_name=None, user=None):
             channelprofilemembership__enabled=True
         ).order_by('channel_number')
     else:
-        if profile_name is not None:
-            try:
-                channel_profile = ChannelProfile.objects.get(name=profile_name)
-            except ChannelProfile.DoesNotExist:
-                logger.warning("Requested channel profile (%s) during m3u generation does not exist", profile_name)
-                raise Http404(f"Channel profile '{profile_name}' not found")
-            channels = Channel.objects.filter(
-                channelprofilemembership__channel_profile=channel_profile,
-                channelprofilemembership__enabled=True,
-            ).order_by("channel_number")
-        else:
-            channels = Channel.objects.order_by("channel_number")
+        channels = Channel.objects.order_by("channel_number")

     # Check if the request wants to use direct logo URLs instead of cache
     use_cached_logos = request.GET.get('cachedlogos', 'true').lower() != 'false'
@@ -2303,31 +2292,41 @@ def xc_get_epg(request, user, short=False):
     output = {"epg_listings": []}

     for program in programs:
-        id = "0"
-        epg_id = "0"
         title = program['title'] if isinstance(program, dict) else program.title
         description = program['description'] if isinstance(program, dict) else program.description

         start = program["start_time"] if isinstance(program, dict) else program.start_time
         end = program["end_time"] if isinstance(program, dict) else program.end_time

+        # For database programs, use the actual ID; for generated dummy programs, create a synthetic ID
+        if isinstance(program, dict):
+            # Generated dummy program - create a unique ID from channel + timestamp
+            program_id = str(abs(hash(f"{channel_id}_{int(start.timestamp())}")))
+        else:
+            # Database program - use the actual ID
+            program_id = str(program.id)
+
+        # epg_id refers to the EPG source/channel mapping in XC panels.
+        # Use the actual EPGData ID when available, otherwise fall back to 0.
+        epg_id = str(channel.epg_data.id) if channel.epg_data else "0"
+
         program_output = {
-            "id": f"{id}",
-            "epg_id": f"{epg_id}",
-            "title": base64.b64encode(title.encode()).decode(),
+            "id": program_id,
+            "epg_id": epg_id,
+            "title": base64.b64encode((title or "").encode()).decode(),
             "lang": "",
-            "start": start.strftime("%Y%m%d%H%M%S"),
-            "end": end.strftime("%Y%m%d%H%M%S"),
-            "description": base64.b64encode(description.encode()).decode(),
-            "channel_id": channel_num_int,
-            "start_timestamp": int(start.timestamp()),
-            "stop_timestamp": int(end.timestamp()),
+            "start": start.strftime("%Y-%m-%d %H:%M:%S"),
+            "end": end.strftime("%Y-%m-%d %H:%M:%S"),
+            "description": base64.b64encode((description or "").encode()).decode(),
+            "channel_id": str(channel_num_int),
+            "start_timestamp": str(int(start.timestamp())),
+            "stop_timestamp": str(int(end.timestamp())),
             "stream_id": f"{channel_id}",
         }

         if short == False:
             program_output["now_playing"] = 1 if start <= django_timezone.now() <= end else 0
-            program_output["has_archive"] = "0"
+            program_output["has_archive"] = 0

         output['epg_listings'].append(program_output)
@@ -2532,34 +2531,45 @@ def xc_get_series_info(request, user, series_id):
         except Exception as e:
             logger.error(f"Error refreshing series data for relation {series_relation.id}: {str(e)}")

-    # Get episodes for this series from the same M3U account
-    episode_relations = M3UEpisodeRelation.objects.filter(
-        episode__series=series,
-        m3u_account=series_relation.m3u_account
-    ).select_related('episode').order_by('episode__season_number', 'episode__episode_number')
+    # Get unique episodes for this series that have relations from any active M3U account.
+    # We query episodes directly to avoid duplicates when multiple relations exist
+    # (e.g., same episode in different languages/qualities).
+    from apps.vod.models import Episode
+    episodes = Episode.objects.filter(
+        series=series,
+        m3u_relations__m3u_account__is_active=True
+    ).distinct().order_by('season_number', 'episode_number')

     # Group episodes by season
     seasons = {}
-    for relation in episode_relations:
-        episode = relation.episode
+    for episode in episodes:
         season_num = episode.season_number or 1
         if season_num not in seasons:
             seasons[season_num] = []

-        # Try to get the highest priority related M3UEpisodeRelation for this episode (for video/audio/bitrate)
-        first_relation = M3UEpisodeRelation.objects.filter(
-            episode=episode
+        # Get the highest priority relation for this episode (for container_extension, video/audio/bitrate)
+        from apps.vod.models import M3UEpisodeRelation
+        best_relation = M3UEpisodeRelation.objects.filter(
+            episode=episode,
+            m3u_account__is_active=True
         ).select_related('m3u_account').order_by('-m3u_account__priority', 'id').first()

         video = audio = bitrate = None
-        if first_relation and first_relation.custom_properties:
-            info = first_relation.custom_properties.get('info')
-            if info and isinstance(info, dict):
-                info_info = info.get('info')
-                if info_info and isinstance(info_info, dict):
-                    video = info_info.get('video', {})
-                    audio = info_info.get('audio', {})
-                    bitrate = info_info.get('bitrate', 0)
+        container_extension = "mp4"
+        added_timestamp = str(int(episode.created_at.timestamp()))
+
+        if best_relation:
+            container_extension = best_relation.container_extension or "mp4"
+            added_timestamp = str(int(best_relation.created_at.timestamp()))
+            if best_relation.custom_properties:
+                info = best_relation.custom_properties.get('info')
+                if info and isinstance(info, dict):
+                    info_info = info.get('info')
+                    if info_info and isinstance(info_info, dict):
+                        video = info_info.get('video', {})
+                        audio = info_info.get('audio', {})
+                        bitrate = info_info.get('bitrate', 0)

         if video is None:
             video = episode.custom_properties.get('video', {}) if episode.custom_properties else {}
         if audio is None:
@@ -2572,8 +2582,8 @@ def xc_get_series_info(request, user, series_id):
             "season": season_num,
             "episode_num": episode.episode_number or 0,
             "title": episode.name,
-            "container_extension": relation.container_extension or "mp4",
-            "added": str(int(relation.created_at.timestamp())),
+            "container_extension": container_extension,
+            "added": added_timestamp,
             "custom_sid": None,
             "direct_source": "",
             "info": {
@@ -2889,7 +2899,7 @@ def xc_series_stream(request, username, password, stream_id, extension):
     filters = {"episode_id": stream_id, "m3u_account__is_active": True}

     try:
-        episode_relation = M3UEpisodeRelation.objects.select_related('episode').get(**filters)
+        episode_relation = M3UEpisodeRelation.objects.select_related('episode').filter(**filters).order_by('-m3u_account__priority', 'id').first()
     except M3UEpisodeRelation.DoesNotExist:
         return JsonResponse({"error": "Episode not found"}, status=404)
@@ -48,9 +48,11 @@ class ClientManager:
         # Import here to avoid potential import issues
         from apps.proxy.ts_proxy.channel_status import ChannelStatus
         import redis
+        from django.conf import settings

-        # Get all channels from Redis
-        redis_client = redis.Redis.from_url('redis://localhost:6379', decode_responses=True)
+        # Get all channels from Redis using settings
+        redis_url = getattr(settings, 'REDIS_URL', 'redis://localhost:6379/0')
+        redis_client = redis.Redis.from_url(redis_url, decode_responses=True)
         all_channels = []
         cursor = 0
@@ -97,7 +97,11 @@ class PersistentVODConnection:
         # First check if we have a pre-stored content length from the HEAD request
         try:
             import redis
-            r = redis.StrictRedis(host='localhost', port=6379, db=0, decode_responses=True)
+            from django.conf import settings
+            redis_host = getattr(settings, 'REDIS_HOST', 'localhost')
+            redis_port = int(getattr(settings, 'REDIS_PORT', 6379))
+            redis_db = int(getattr(settings, 'REDIS_DB', 0))
+            r = redis.StrictRedis(host=redis_host, port=redis_port, db=redis_db, decode_responses=True)
             content_length_key = f"vod_content_length:{self.session_id}"
             stored_length = r.get(content_length_key)
             if stored_length:
@@ -24,6 +24,11 @@ from apps.m3u.models import M3UAccountProfile
 logger = logging.getLogger("vod_proxy")


+def get_vod_client_stop_key(client_id):
+    """Get the Redis key for signaling a VOD client to stop"""
+    return f"vod_proxy:client:{client_id}:stop"
+
+
 def infer_content_type_from_url(url: str) -> Optional[str]:
     """
     Infer MIME type from file extension in URL
@@ -832,6 +837,7 @@ class MultiWorkerVODConnectionManager:
             # Create streaming generator
             def stream_generator():
                 decremented = False
+                stop_signal_detected = False
                 try:
                     logger.info(f"[{client_id}] Worker {self.worker_id} - Starting Redis-backed stream")
@@ -846,14 +852,25 @@ class MultiWorkerVODConnectionManager:
                     bytes_sent = 0
                     chunk_count = 0

+                    # Get the stop signal key for this client
+                    stop_key = get_vod_client_stop_key(client_id)
+
                     for chunk in upstream_response.iter_content(chunk_size=8192):
                         if chunk:
                             yield chunk
                             bytes_sent += len(chunk)
                             chunk_count += 1

-                            # Update activity every 100 chunks in consolidated connection state
+                            # Check for stop signal every 100 chunks
                             if chunk_count % 100 == 0:
+                                # Check if a stop signal has been set
+                                if self.redis_client and self.redis_client.exists(stop_key):
+                                    logger.info(f"[{client_id}] Worker {self.worker_id} - Stop signal detected, terminating stream")
+                                    # Delete the stop key
+                                    self.redis_client.delete(stop_key)
+                                    stop_signal_detected = True
+                                    break
+
+                                # Update the connection state
                                 logger.debug(f"Client: [{client_id}] Worker: {self.worker_id} sent {chunk_count} chunks for VOD: {content_name}")
                                 if redis_connection._acquire_lock():
@@ -867,7 +884,10 @@ class MultiWorkerVODConnectionManager:
                     finally:
                         redis_connection._release_lock()

-                logger.info(f"[{client_id}] Worker {self.worker_id} - Redis-backed stream completed: {bytes_sent} bytes sent")
+                if stop_signal_detected:
+                    logger.info(f"[{client_id}] Worker {self.worker_id} - Stream stopped by signal: {bytes_sent} bytes sent")
+                else:
+                    logger.info(f"[{client_id}] Worker {self.worker_id} - Redis-backed stream completed: {bytes_sent} bytes sent")
                 redis_connection.decrement_active_streams()
                 decremented = True
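The stop handshake is just a short-lived Redis key: an admin sets it, the generator polls for it every 100 chunks, deletes it, and breaks out of the loop. A sketch of the signaling side (key format from get_vod_client_stop_key above; the connection URL and client id are assumptions):

    import redis

    r = redis.Redis.from_url("redis://localhost:6379/0", decode_responses=True)
    client_id = "abc123"  # hypothetical client
    r.setex(f"vod_proxy:client:{client_id}:stop", 60, "true")  # 60s TTL, matching the view below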
@@ -21,4 +21,7 @@ urlpatterns = [

     # VOD Stats
     path('stats/', views.VODStatsView.as_view(), name='vod_stats'),
+
+    # Stop VOD client connection
+    path('stop_client/', views.stop_vod_client, name='stop_vod_client'),
 ]
@@ -15,7 +15,7 @@ from django.views import View
 from apps.vod.models import Movie, Series, Episode
 from apps.m3u.models import M3UAccount, M3UAccountProfile
 from apps.proxy.vod_proxy.connection_manager import VODConnectionManager
-from apps.proxy.vod_proxy.multi_worker_connection_manager import MultiWorkerVODConnectionManager, infer_content_type_from_url
+from apps.proxy.vod_proxy.multi_worker_connection_manager import MultiWorkerVODConnectionManager, infer_content_type_from_url, get_vod_client_stop_key
 from .utils import get_client_info, create_vod_response

 logger = logging.getLogger(__name__)
@@ -329,7 +329,11 @@ class VODStreamView(View):
         # Store the total content length in Redis for the persistent connection to use
         try:
             import redis
-            r = redis.StrictRedis(host='localhost', port=6379, db=0, decode_responses=True)
+            from django.conf import settings
+            redis_host = getattr(settings, 'REDIS_HOST', 'localhost')
+            redis_port = int(getattr(settings, 'REDIS_PORT', 6379))
+            redis_db = int(getattr(settings, 'REDIS_DB', 0))
+            r = redis.StrictRedis(host=redis_host, port=redis_port, db=redis_db, decode_responses=True)
             content_length_key = f"vod_content_length:{session_id}"
             r.set(content_length_key, total_size, ex=1800)  # Store for 30 minutes
             logger.info(f"[VOD-HEAD] Stored total content length {total_size} for session {session_id}")
@@ -1011,3 +1015,59 @@ class VODStatsView(View):
         except Exception as e:
             logger.error(f"Error getting VOD stats: {e}")
             return JsonResponse({'error': str(e)}, status=500)
+
+
+from rest_framework.decorators import api_view, permission_classes
+from apps.accounts.permissions import IsAdmin
+
+
+@csrf_exempt
+@api_view(["POST"])
+@permission_classes([IsAdmin])
+def stop_vod_client(request):
+    """Stop a specific VOD client connection using the stop signal mechanism"""
+    try:
+        # Parse the request body
+        import json
+        try:
+            data = json.loads(request.body)
+        except json.JSONDecodeError:
+            return JsonResponse({'error': 'Invalid JSON'}, status=400)
+
+        client_id = data.get('client_id')
+        if not client_id:
+            return JsonResponse({'error': 'No client_id provided'}, status=400)
+
+        logger.info(f"Request to stop VOD client: {client_id}")
+
+        # Get the Redis client
+        connection_manager = MultiWorkerVODConnectionManager.get_instance()
+        redis_client = connection_manager.redis_client
+
+        if not redis_client:
+            return JsonResponse({'error': 'Redis not available'}, status=500)
+
+        # Check if the connection exists
+        connection_key = f"vod_persistent_connection:{client_id}"
+        connection_data = redis_client.hgetall(connection_key)
+        if not connection_data:
+            logger.warning(f"VOD connection not found: {client_id}")
+            return JsonResponse({'error': 'Connection not found'}, status=404)
+
+        # Set a stop signal key that the worker will check
+        stop_key = get_vod_client_stop_key(client_id)
+        redis_client.setex(stop_key, 60, "true")  # 60 second TTL
+
+        logger.info(f"Set stop signal for VOD client: {client_id}")
+
+        return JsonResponse({
+            'message': 'VOD client stop signal sent',
+            'client_id': client_id,
+            'stop_key': stop_key
+        })
+
+    except Exception as e:
+        logger.error(f"Error stopping VOD client: {e}", exc_info=True)
+        return JsonResponse({'error': str(e)}, status=500)
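From the outside, the same signal is raised through the new endpoint registered in urls.py above; a hedged client sketch (the host and auth header are assumptions):

    import requests

    requests.post(
        "http://dispatcharr.local/proxy/vod/stop_client/",  # hypothetical host; path from urls.py
        json={"client_id": "abc123"},
        headers={"Authorization": "Bearer <admin token>"},
    )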
@@ -1232,7 +1232,13 @@ def refresh_series_episodes(account, series, external_series_id, episodes_data=None):


 def batch_process_episodes(account, series, episodes_data, scan_start_time=None):
-    """Process episodes in batches for better performance"""
+    """Process episodes in batches for better performance.
+
+    Note: Multiple streams can represent the same episode (e.g., different languages
+    or qualities). Each stream has a unique stream_id, but they share the same
+    season/episode number. We create one Episode record per (series, season, episode)
+    and multiple M3UEpisodeRelation records pointing to it.
+    """
     if not episodes_data:
         return
@@ -1249,12 +1255,13 @@ def batch_process_episodes(account, series, episodes_data, scan_start_time=None):
     logger.info(f"Batch processing {len(all_episodes_data)} episodes for series {series.name}")

-    # Extract episode identifiers
-    episode_keys = []
+    # Note: episode_keys may have duplicates when multiple streams represent the same episode
+    episode_keys = set()  # Use a set to track unique episode keys
     episode_ids = []
     for episode_data in all_episodes_data:
         season_num = episode_data['_season_number']
         episode_num = episode_data.get('episode_num', 0)
-        episode_keys.append((series.id, season_num, episode_num))
+        episode_keys.add((series.id, season_num, episode_num))
         episode_ids.append(str(episode_data.get('id')))

     # Pre-fetch existing episodes
@@ -1277,6 +1284,10 @@ def batch_process_episodes(account, series, episodes_data, scan_start_time=None):
     relations_to_create = []
     relations_to_update = []

+    # Track episodes we're creating in this batch to avoid duplicates
+    # Key: (series_id, season_number, episode_number) -> Episode object
+    episodes_pending_creation = {}
+
     for episode_data in all_episodes_data:
         try:
             episode_id = str(episode_data.get('id'))
@@ -1306,10 +1317,15 @@ def batch_process_episodes(account, series, episodes_data, scan_start_time=None):
             if backdrop:
                 custom_props['backdrop_path'] = [backdrop]

-            # Find existing episode
+            # Find an existing episode - check the DB first, then pending creations
             episode_key = (series.id, season_number, episode_number)
             episode = existing_episodes.get(episode_key)

+            # Check if we already have this episode pending creation (multiple streams for the same episode)
+            if not episode and episode_key in episodes_pending_creation:
+                episode = episodes_pending_creation[episode_key]
+                logger.debug(f"Reusing pending episode for S{season_number:02d}E{episode_number:02d} (stream_id: {episode_id})")
+
             if episode:
                 # Update existing episode
                 updated = False
@@ -1338,7 +1354,9 @@ def batch_process_episodes(account, series, episodes_data, scan_start_time=None):
                     episode.custom_properties = custom_props if custom_props else None
                     updated = True

-                if updated:
+                # Only add to the update list if the episode has a PK (exists in the DB) and isn't already in the list.
+                # Episodes pending creation don't have PKs yet and will be created via bulk_create.
+                if updated and episode.pk and episode not in episodes_to_update:
                     episodes_to_update.append(episode)
             else:
                 # Create new episode
@@ -1356,6 +1374,8 @@ def batch_process_episodes(account, series, episodes_data, scan_start_time=None):
                     custom_properties=custom_props if custom_props else None
                 )
                 episodes_to_create.append(episode)
+                # Track this episode so subsequent streams with the same season/episode can reuse it
+                episodes_pending_creation[episode_key] = episode

             # Handle the episode relation
             if episode_id in existing_relations:
@@ -1389,9 +1409,28 @@ def batch_process_episodes(account, series, episodes_data, scan_start_time=None):

     # Execute batch operations
     with transaction.atomic():
-        # Create new episodes
+        # Create new episodes - use ignore_conflicts in case of race conditions
         if episodes_to_create:
-            Episode.objects.bulk_create(episodes_to_create)
+            Episode.objects.bulk_create(episodes_to_create, ignore_conflicts=True)
+
+            # Re-fetch the created episodes to get their PKs.
+            # We need to do this because bulk_create with ignore_conflicts doesn't set PKs.
+            created_episode_keys = [
+                (ep.series_id, ep.season_number, ep.episode_number)
+                for ep in episodes_to_create
+            ]
+            db_episodes = Episode.objects.filter(series=series)
+            episode_pk_map = {
+                (ep.series_id, ep.season_number, ep.episode_number): ep
+                for ep in db_episodes
+            }
+
+            # Update relations to point to the actual DB episodes with PKs
+            for relation in relations_to_create:
+                ep = relation.episode
+                key = (ep.series_id, ep.season_number, ep.episode_number)
+                if key in episode_pk_map:
+                    relation.episode = episode_pk_map[key]

         # Update existing episodes
         if episodes_to_update:
@@ -1400,9 +1439,9 @@ def batch_process_episodes(account, series, episodes_data, scan_start_time=None):
                 'tmdb_id', 'imdb_id', 'custom_properties'
             ])

-        # Create new episode relations
+        # Create new episode relations - use ignore_conflicts for stream_id duplicates
         if relations_to_create:
-            M3UEpisodeRelation.objects.bulk_create(relations_to_create)
+            M3UEpisodeRelation.objects.bulk_create(relations_to_create, ignore_conflicts=True)

         # Update existing episode relations
         if relations_to_update:
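The re-fetch step above is the standard workaround for bulk_create(ignore_conflicts=True) not populating primary keys on some database backends. In generic form (model and field names here are hypothetical):

    # After bulk_create with ignore_conflicts=True, re-query by natural key to
    # attach foreign keys to the rows that actually exist in the database.
    Widget.objects.bulk_create(new_widgets, ignore_conflicts=True)
    by_key = {(w.owner_id, w.slug): w for w in Widget.objects.filter(owner=owner)}
    for rel in pending_relations:
        key = (rel.widget.owner_id, rel.widget.slug)
        if key in by_key:
            rel.widget = by_key[key]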
@@ -513,7 +513,8 @@ def rehash_streams(keys):

         for obj in batch:
             # Generate the new hash
-            new_hash = Stream.generate_hash_key(obj.name, obj.url, obj.tvg_id, keys, m3u_id=obj.m3u_account_id)
+            group_name = obj.channel_group.name if obj.channel_group else None
+            new_hash = Stream.generate_hash_key(obj.name, obj.url, obj.tvg_id, keys, m3u_id=obj.m3u_account_id, group=group_name)

             # Check if this hash already exists in our tracking dict or in the database
             if new_hash in hash_keys:
@@ -37,7 +37,9 @@ def stream_view(request, channel_uuid):
     """
     try:
         redis_host = getattr(settings, "REDIS_HOST", "localhost")
-        redis_client = redis.Redis(host=settings.REDIS_HOST, port=6379, db=int(getattr(settings, "REDIS_DB", "0")))
+        redis_port = int(getattr(settings, "REDIS_PORT", 6379))
+        redis_db = int(getattr(settings, "REDIS_DB", "0"))
+        redis_client = redis.Redis(host=redis_host, port=redis_port, db=redis_db)

         # Retrieve the channel by the provided stream_id.
         channel = Channel.objects.get(uuid=channel_uuid)
@@ -73,8 +73,12 @@ class PersistentLock:

 # Example usage (for testing purposes only):
 if __name__ == "__main__":
-    # Connect to Redis on localhost; adjust connection parameters as needed.
-    client = redis.Redis(host="localhost", port=6379, db=0)
+    import os
+    # Connect to Redis using environment variables; adjust connection parameters as needed.
+    redis_host = os.environ.get("REDIS_HOST", "localhost")
+    redis_port = int(os.environ.get("REDIS_PORT", 6379))
+    redis_db = int(os.environ.get("REDIS_DB", 0))
+    client = redis.Redis(host=redis_host, port=redis_port, db=redis_db)
     lock = PersistentLock(client, "lock:example_account", lock_timeout=120)

     if lock.acquire():
@@ -6,6 +6,7 @@ BASE_DIR = Path(__file__).resolve().parent.parent

 SECRET_KEY = os.environ.get("DJANGO_SECRET_KEY")
 REDIS_HOST = os.environ.get("REDIS_HOST", "localhost")
+REDIS_PORT = int(os.environ.get("REDIS_PORT", 6379))
 REDIS_DB = os.environ.get("REDIS_DB", "0")

 # Set DEBUG to True for development, False for production
@@ -118,7 +119,7 @@ CHANNEL_LAYERS = {
     "default": {
         "BACKEND": "channels_redis.core.RedisChannelLayer",
         "CONFIG": {
-            "hosts": [(REDIS_HOST, 6379, REDIS_DB)],  # Ensure Redis is running
+            "hosts": [(REDIS_HOST, REDIS_PORT, REDIS_DB)],  # Ensure Redis is running
         },
     },
 }
@@ -184,8 +185,10 @@ STATICFILES_DIRS = [
 DEFAULT_AUTO_FIELD = "django.db.models.BigAutoField"
 AUTH_USER_MODEL = "accounts.User"

-CELERY_BROKER_URL = os.environ.get("CELERY_BROKER_URL", "redis://localhost:6379/0")
-CELERY_RESULT_BACKEND = CELERY_BROKER_URL
+# Build the default Redis URL from components for Celery
+_default_redis_url = f"redis://{REDIS_HOST}:{REDIS_PORT}/{REDIS_DB}"
+CELERY_BROKER_URL = os.environ.get("CELERY_BROKER_URL", _default_redis_url)
+CELERY_RESULT_BACKEND = os.environ.get("CELERY_RESULT_BACKEND", CELERY_BROKER_URL)

 # Configure the Redis key prefix
 CELERY_RESULT_BACKEND_TRANSPORT_OPTIONS = {
@@ -226,6 +229,13 @@ CELERY_BEAT_SCHEDULE = {
 MEDIA_ROOT = BASE_DIR / "media"
 MEDIA_URL = "/media/"

+# Backup settings
+BACKUP_ROOT = os.environ.get("BACKUP_ROOT", "/data/backups")
+BACKUP_DATA_DIRS = [
+    os.environ.get("LOGOS_DIR", "/data/logos"),
+    os.environ.get("UPLOADS_DIR", "/data/uploads"),
+    os.environ.get("PLUGINS_DIR", "/data/plugins"),
+]
+
 SERVER_IP = "127.0.0.1"
@@ -242,7 +252,7 @@ SIMPLE_JWT = {
 }

 # Redis connection settings
-REDIS_URL = "redis://localhost:6379/0"
+REDIS_URL = os.environ.get("REDIS_URL", f"redis://{REDIS_HOST}:{REDIS_PORT}/{REDIS_DB}")
 REDIS_SOCKET_TIMEOUT = 60  # Socket timeout in seconds
 REDIS_SOCKET_CONNECT_TIMEOUT = 5  # Connection timeout in seconds
 REDIS_HEALTH_CHECK_INTERVAL = 15  # Health check every 15 seconds
@@ -1,11 +1,65 @@
-#!/bin/bash
-docker build --build-arg BRANCH=dev -t dispatcharr/dispatcharr:dev -f Dockerfile ..
-
-# Get version information
-VERSION=$(python -c "import sys; sys.path.append('..'); import version; print(version.__version__)")
-
-# Build with version tag
-docker build --build-arg BRANCH=dev \
-  -t dispatcharr/dispatcharr:dev \
-  -t dispatcharr/dispatcharr:${VERSION} \
-  -f Dockerfile ..
+#!/bin/bash
+set -e
+
+# Default values
+VERSION=$(python3 -c "import sys; sys.path.append('..'); import version; print(version.__version__)")
+REGISTRY="dispatcharr"   # Registry or private repo to push to
+IMAGE="dispatcharr"      # Image that we're building
+BRANCH="dev"
+ARCH=""                  # Architectures to build for, e.g. linux/amd64,linux/arm64
+PUSH=false
+
+usage() {
+    cat <<- EOF
+	To test locally:
+	    ./build-dev.sh
+
+	To build and push to a registry:
+	    ./build-dev.sh -p
+
+	To build and push to a private registry:
+	    ./build-dev.sh -p -r myregistry:5000
+
+	To build for both x86_64 and arm64:
+	    ./build-dev.sh -p -a linux/amd64,linux/arm64
+
+	Do it all:
+	    ./build-dev.sh -p -r myregistry:5000 -a linux/amd64,linux/arm64
+	EOF
+    exit 0
+}
+
+# Parse options
+while getopts "pr:a:b:i:h" opt; do
+    case $opt in
+        r) REGISTRY="$OPTARG" ;;
+        a) ARCH="--platform $OPTARG" ;;
+        b) BRANCH="$OPTARG" ;;
+        i) IMAGE="$OPTARG" ;;
+        p) PUSH=true ;;
+        h) usage ;;
+        \?) echo "Invalid option: -$OPTARG" >&2; exit 1 ;;
+    esac
+done
+
+BUILD_ARGS="BRANCH=$BRANCH"
+
+echo docker build --build-arg $BUILD_ARGS $ARCH -t $IMAGE
+docker build -f Dockerfile --build-arg $BUILD_ARGS $ARCH -t $IMAGE ..
+docker tag $IMAGE $IMAGE:$BRANCH
+docker tag $IMAGE $IMAGE:$VERSION
+
+if [ "$PUSH" = false ]; then
+    echo "Please run 'docker push $IMAGE:$BRANCH' and 'docker push $IMAGE:${VERSION}' when ready"
+else
+    for TAG in latest "$VERSION" "$BRANCH"; do
+        docker tag "$IMAGE" "$REGISTRY/$IMAGE:$TAG"
+        docker push -q "$REGISTRY/$IMAGE:$TAG"
+    done
+    echo "Images pushed successfully."
+fi
@@ -29,9 +29,17 @@ if [ "$(id -u)" = "0" ] && [ -d "/app" ]; then
         chown $PUID:$PGID /app
     fi
 fi

 # Configure nginx port
 sed -i "s/NGINX_PORT/${DISPATCHARR_PORT}/g" /etc/nginx/sites-enabled/default

+# Configure nginx based on IPv6 availability
+if ip -6 addr show | grep -q "inet6"; then
+    echo "✅ IPv6 is available, enabling IPv6 in nginx"
+else
+    echo "⚠️ IPv6 not available, disabling IPv6 in nginx"
+    sed -i '/listen \[::\]:/d' /etc/nginx/sites-enabled/default
+fi
+
 # NOTE: mac doesn't run as root, so only manage permissions
 # if this script is running as root
 if [ "$(id -u)" = "0" ]; then
@@ -35,6 +35,13 @@ server {
         root /data;
     }

+    # Internal location for X-Accel-Redirect backup downloads.
+    # Django handles auth; nginx serves the file directly.
+    location /protected-backups/ {
+        internal;
+        alias /data/backups/;
+    }
+
     location /api/logos/(?<logo_id>\d+)/cache/ {
         proxy_pass http://127.0.0.1:5656;
         proxy_cache logo_cache;
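On the Django side, X-Accel-Redirect means the view authenticates the request and then hands the actual file transfer to nginx via the internal location above; a minimal sketch (the view and filename handling here are illustrative, not this repo's implementation):

    from django.http import HttpResponse

    def download_backup(request, filename):
        # Auth/permission checks would go here.
        resp = HttpResponse()
        resp["X-Accel-Redirect"] = f"/protected-backups/{filename}"
        resp["Content-Disposition"] = f'attachment; filename="{filename}"'
        return resp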
@@ -21,6 +21,7 @@ module = dispatcharr.wsgi:application
 virtualenv = /dispatcharrpy
 master = true
 env = DJANGO_SETTINGS_MODULE=dispatcharr.settings
+env = USE_NGINX_ACCEL=true
 socket = /app/uwsgi.sock
 chmod-socket = 777
 vacuum = true
1317  frontend/package-lock.json  generated
File diff suppressed because it is too large
@@ -54,18 +54,21 @@
     "@types/react": "^19.1.0",
     "@types/react-dom": "^19.1.0",
     "@vitejs/plugin-react-swc": "^4.1.0",
-    "eslint": "^9.21.0",
+    "eslint": "^9.27.0",
     "eslint-plugin-react-hooks": "^5.1.0",
     "eslint-plugin-react-refresh": "^0.4.19",
     "globals": "^15.15.0",
     "jsdom": "^27.0.0",
     "prettier": "^3.5.3",
-    "vite": "^6.2.0",
+    "vite": "^7.1.7",
     "vitest": "^3.2.4"
   },
   "resolutions": {
     "vite": "7.1.7",
     "react": "19.1.0",
     "react-dom": "19.1.0"
   },
   "overrides": {
     "js-yaml": "^4.1.1"
   }
 }
@@ -1349,6 +1349,183 @@ export default class API {
     }
   }

+  // Backup API (async with Celery task polling)
+  static async listBackups() {
+    try {
+      const response = await request(`${host}/api/backups/`);
+      return response || [];
+    } catch (e) {
+      errorNotification('Failed to load backups', e);
+      throw e;
+    }
+  }
+
+  static async getBackupStatus(taskId, token = null) {
+    try {
+      let url = `${host}/api/backups/status/${taskId}/`;
+      if (token) {
+        url += `?token=${encodeURIComponent(token)}`;
+      }
+      const response = await request(url, { auth: !token });
+      return response;
+    } catch (e) {
+      throw e;
+    }
+  }
+
+  static async waitForBackupTask(taskId, onProgress, token = null) {
+    const pollInterval = 2000; // Poll every 2 seconds
+    const maxAttempts = 300; // Max 10 minutes (300 * 2s)
+
+    for (let attempt = 0; attempt < maxAttempts; attempt++) {
+      try {
+        const status = await API.getBackupStatus(taskId, token);
+
+        if (onProgress) {
+          onProgress(status);
+        }
+
+        if (status.state === 'completed') {
+          return status.result;
+        } else if (status.state === 'failed') {
+          throw new Error(status.error || 'Task failed');
+        }
+      } catch (e) {
+        throw e;
+      }
+
+      // Wait before the next poll
+      await new Promise((resolve) => setTimeout(resolve, pollInterval));
+    }
+
+    throw new Error('Task timed out');
+  }
+
+  static async createBackup(onProgress) {
+    try {
+      // Start the backup task
+      const response = await request(`${host}/api/backups/create/`, {
+        method: 'POST',
+      });
+
+      // Wait for the task to complete, using the token for auth
+      const result = await API.waitForBackupTask(response.task_id, onProgress, response.task_token);
+      return result;
+    } catch (e) {
+      errorNotification('Failed to create backup', e);
+      throw e;
+    }
+  }
+
+  static async uploadBackup(file) {
+    try {
+      const formData = new FormData();
+      formData.append('file', file);
+
+      const response = await request(
+        `${host}/api/backups/upload/`,
+        {
+          method: 'POST',
+          body: formData,
+        }
+      );
+      return response;
+    } catch (e) {
+      errorNotification('Failed to upload backup', e);
+      throw e;
+    }
+  }
+
+  static async deleteBackup(filename) {
+    try {
+      const encodedFilename = encodeURIComponent(filename);
+      await request(`${host}/api/backups/${encodedFilename}/delete/`, {
+        method: 'DELETE',
+      });
+    } catch (e) {
+      errorNotification('Failed to delete backup', e);
+      throw e;
+    }
+  }
+
+  static async getDownloadToken(filename) {
+    // Get a download token from the server
+    try {
+      const response = await request(`${host}/api/backups/${encodeURIComponent(filename)}/download-token/`);
+      return response.token;
+    } catch (e) {
+      throw e;
+    }
+  }
+
+  static async downloadBackup(filename) {
+    try {
+      // Get a download token first (requires auth)
+      const token = await API.getDownloadToken(filename);
+      const encodedFilename = encodeURIComponent(filename);
+
+      // Build the download URL with the token
+      const downloadUrl = `${host}/api/backups/${encodedFilename}/download/?token=${encodeURIComponent(token)}`;
+
+      // Use direct browser navigation instead of fetch to avoid CORS issues
+      const link = document.createElement('a');
+      link.href = downloadUrl;
+      link.download = filename;
+      document.body.appendChild(link);
+      link.click();
+      document.body.removeChild(link);
+
+      return { filename };
+    } catch (e) {
+      errorNotification('Failed to download backup', e);
+      throw e;
+    }
+  }
+
+  static async restoreBackup(filename, onProgress) {
+    try {
+      // Start the restore task
+      const encodedFilename = encodeURIComponent(filename);
+      const response = await request(
+        `${host}/api/backups/${encodedFilename}/restore/`,
+        {
+          method: 'POST',
+        }
+      );
+
+      // Wait for the task to complete, using the token for auth.
+      // Token-based auth allows status polling even after the DB restore invalidates user sessions.
+      const result = await API.waitForBackupTask(response.task_id, onProgress, response.task_token);
+      return result;
+    } catch (e) {
+      errorNotification('Failed to restore backup', e);
+      throw e;
+    }
+  }
+
+  static async getBackupSchedule() {
+    try {
+      const response = await request(`${host}/api/backups/schedule/`);
+      return response;
+    } catch (e) {
+      errorNotification('Failed to get backup schedule', e);
+      throw e;
+    }
+  }
+
+  static async updateBackupSchedule(settings) {
+    try {
+      const response = await request(`${host}/api/backups/schedule/update/`, {
+        method: 'PUT',
+        body: settings,
+      });
+      return response;
+    } catch (e) {
+      errorNotification('Failed to update backup schedule', e);
+      throw e;
+    }
+  }
+
   static async getVersion() {
     try {
       const response = await request(`${host}/api/core/version/`, {
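The polling flow the frontend implements maps onto three HTTP calls, so the same sequence can be driven from any client. Sketched in Python (the host is hypothetical; endpoints as used above):

    import time
    import requests

    base = "http://dispatcharr.local"  # hypothetical host
    start = requests.post(f"{base}/api/backups/create/", headers={"Authorization": "Bearer <token>"}).json()
    task_id, task_token = start["task_id"], start["task_token"]

    while True:
        status = requests.get(
            f"{base}/api/backups/status/{task_id}/", params={"token": task_token}
        ).json()
        if status["state"] in ("completed", "failed"):
            break
        time.sleep(2)  # same 2-second cadence as waitForBackupTask
    print(status)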
@@ -1514,6 +1691,19 @@ export default class API {
     }
   }

+  static async stopVODClient(clientId) {
+    try {
+      const response = await request(`${host}/proxy/vod/stop_client/`, {
+        method: 'POST',
+        body: { client_id: clientId },
+      });
+
+      return response;
+    } catch (e) {
+      errorNotification('Failed to stop VOD client', e);
+    }
+  }
+
   static async stopChannel(id) {
     try {
       const response = await request(`${host}/proxy/ts/stop/${id}`, {
@@ -2131,7 +2321,8 @@ export default class API {

   static async deleteSeriesRule(tvgId) {
     try {
-      await request(`${host}/api/channels/series-rules/${tvgId}/`, { method: 'DELETE' });
+      const encodedTvgId = encodeURIComponent(tvgId);
+      await request(`${host}/api/channels/series-rules/${encodedTvgId}/`, { method: 'DELETE' });
       notifications.show({ title: 'Series rule removed' });
     } catch (e) {
       errorNotification('Failed to remove series rule', e);
@@ -1,5 +1,5 @@
 // frontend/src/components/FloatingVideo.js
-import React, { useEffect, useRef, useState } from 'react';
+import React, { useCallback, useEffect, useRef, useState } from 'react';
 import Draggable from 'react-draggable';
 import useVideoStore from '../store/useVideoStore';
 import mpegts from 'mpegts.js';
@ -17,7 +17,94 @@ export default function FloatingVideo() {
  const [isLoading, setIsLoading] = useState(false);
  const [loadError, setLoadError] = useState(null);
  const [showOverlay, setShowOverlay] = useState(true);
  const [videoSize, setVideoSize] = useState({ width: 320, height: 180 });
  const [isResizing, setIsResizing] = useState(false);
  const resizeStateRef = useRef(null);
  const overlayTimeoutRef = useRef(null);
  const aspectRatioRef = useRef(320 / 180);
  const [dragPosition, setDragPosition] = useState(null);
  const dragPositionRef = useRef(null);
  const dragOffsetRef = useRef({ x: 0, y: 0 });
  const initialPositionRef = useRef(null);

  const MIN_WIDTH = 220;
  const MIN_HEIGHT = 124;
  const VISIBLE_MARGIN = 48; // keep part of the window visible when dragging
  const HEADER_HEIGHT = 38; // height of the close button header area
  const ERROR_HEIGHT = 45; // approximate height of error message area when displayed
  const HANDLE_SIZE = 18;
  const HANDLE_OFFSET = 0;
  const resizeHandleBaseStyle = {
    position: 'absolute',
    width: HANDLE_SIZE,
    height: HANDLE_SIZE,
    backgroundColor: 'transparent',
    borderRadius: 6,
    zIndex: 8,
    touchAction: 'none',
  };
  const resizeHandles = [
    {
      id: 'bottom-right',
      cursor: 'nwse-resize',
      xDir: 1,
      yDir: 1,
      isLeft: false,
      isTop: false,
      style: {
        bottom: HANDLE_OFFSET,
        right: HANDLE_OFFSET,
        borderBottom: '2px solid rgba(255, 255, 255, 0.9)',
        borderRight: '2px solid rgba(255, 255, 255, 0.9)',
        borderRadius: '0 0 6px 0',
      },
    },
    {
      id: 'bottom-left',
      cursor: 'nesw-resize',
      xDir: -1,
      yDir: 1,
      isLeft: true,
      isTop: false,
      style: {
        bottom: HANDLE_OFFSET,
        left: HANDLE_OFFSET,
        borderBottom: '2px solid rgba(255, 255, 255, 0.9)',
        borderLeft: '2px solid rgba(255, 255, 255, 0.9)',
        borderRadius: '0 0 0 6px',
      },
    },
    {
      id: 'top-right',
      cursor: 'nesw-resize',
      xDir: 1,
      yDir: -1,
      isLeft: false,
      isTop: true,
      style: {
        top: HANDLE_OFFSET,
        right: HANDLE_OFFSET,
        borderTop: '2px solid rgba(255, 255, 255, 0.9)',
        borderRight: '2px solid rgba(255, 255, 255, 0.9)',
        borderRadius: '0 6px 0 0',
      },
    },
    {
      id: 'top-left',
      cursor: 'nwse-resize',
      xDir: -1,
      yDir: -1,
      isLeft: true,
      isTop: true,
      style: {
        top: HANDLE_OFFSET,
        left: HANDLE_OFFSET,
        borderTop: '2px solid rgba(255, 255, 255, 0.9)',
        borderLeft: '2px solid rgba(255, 255, 255, 0.9)',
        borderRadius: '6px 0 0 0',
      },
    },
  ];

  // Safely destroy the mpegts player to prevent errors
  const safeDestroyPlayer = () => {
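The xDir/yDir/isLeft/isTop flags above let a single resize handler serve all four corners. A small sketch of the mapping (delta values made up):

// Dragging right (+deltaX) grows the width on right-edge handles (xDir: 1)
// and shrinks it on left-edge handles (xDir: -1); same idea vertically.
const widthDelta = (deltaX, handle) => deltaX * handle.xDir;
console.log(widthDelta(24, { xDir: 1 }));  // 24  -> bottom-right grows
console.log(widthDelta(24, { xDir: -1 })); // -24 -> top-left shrinks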
@ -315,24 +402,319 @@ export default function FloatingVideo() {
    }, 50);
  };

  const clampToVisible = useCallback(
    (x, y) => {
      if (typeof window === 'undefined') return { x, y };

      const totalHeight = videoSize.height + HEADER_HEIGHT + ERROR_HEIGHT;
      const minX = -(videoSize.width - VISIBLE_MARGIN);
      const minY = -(totalHeight - VISIBLE_MARGIN);
      const maxX = window.innerWidth - videoSize.width;
      const maxY = window.innerHeight - totalHeight;

      return {
        x: Math.min(Math.max(x, minX), maxX),
        y: Math.min(Math.max(y, minY), maxY),
      };
    },
    [
      VISIBLE_MARGIN,
      HEADER_HEIGHT,
      ERROR_HEIGHT,
      videoSize.height,
      videoSize.width,
    ]
  );

  const clampToVisibleWithSize = useCallback(
    (x, y, width, height) => {
      if (typeof window === 'undefined') return { x, y };

      const totalHeight = height + HEADER_HEIGHT + ERROR_HEIGHT;
      const minX = -(width - VISIBLE_MARGIN);
      const minY = -(totalHeight - VISIBLE_MARGIN);
      const maxX = window.innerWidth - width;
      const maxY = window.innerHeight - totalHeight;

      return {
        x: Math.min(Math.max(x, minX), maxX),
        y: Math.min(Math.max(y, minY), maxY),
      };
    },
    [VISIBLE_MARGIN, HEADER_HEIGHT, ERROR_HEIGHT]
  );

  const handleResizeMove = useCallback(
    (event) => {
      if (!resizeStateRef.current) return;

      const clientX =
        event.touches && event.touches.length
          ? event.touches[0].clientX
          : event.clientX;
      const clientY =
        event.touches && event.touches.length
          ? event.touches[0].clientY
          : event.clientY;

      const {
        startX,
        startY,
        startWidth,
        startHeight,
        startPos,
        handle,
        aspectRatio,
      } = resizeStateRef.current;
      const deltaX = clientX - startX;
      const deltaY = clientY - startY;
      const widthDelta = deltaX * handle.xDir;
      const heightDelta = deltaY * handle.yDir;
      const ratio = aspectRatio || aspectRatioRef.current;

      // Derive width/height while keeping the original aspect ratio
      let nextWidth = startWidth + widthDelta;
      let nextHeight = nextWidth / ratio;

      // Allow vertical-driven resize if the user drags mostly vertically
      if (Math.abs(deltaY) > Math.abs(deltaX)) {
        nextHeight = startHeight + heightDelta;
        nextWidth = nextHeight * ratio;
      }

      // Respect minimums while keeping the ratio
      if (nextWidth < MIN_WIDTH) {
        nextWidth = MIN_WIDTH;
        nextHeight = nextWidth / ratio;
      }

      if (nextHeight < MIN_HEIGHT) {
        nextHeight = MIN_HEIGHT;
        nextWidth = nextHeight * ratio;
      }

      // Keep within viewport with a margin based on current position
      const posX = startPos?.x ?? 0;
      const posY = startPos?.y ?? 0;
      const margin = VISIBLE_MARGIN;
      let maxWidth = null;
      let maxHeight = null;

      if (!handle.isLeft) {
        maxWidth = Math.max(MIN_WIDTH, window.innerWidth - posX - margin);
      }

      if (!handle.isTop) {
        maxHeight = Math.max(MIN_HEIGHT, window.innerHeight - posY - margin);
      }

      if (maxWidth != null && nextWidth > maxWidth) {
        nextWidth = maxWidth;
        nextHeight = nextWidth / ratio;
      }

      if (maxHeight != null && nextHeight > maxHeight) {
        nextHeight = maxHeight;
        nextWidth = nextHeight * ratio;
      }

      // Final pass to honor both bounds while keeping the ratio
      if (maxWidth != null && nextWidth > maxWidth) {
        nextWidth = maxWidth;
        nextHeight = nextWidth / ratio;
      }

      setVideoSize({
        width: Math.round(nextWidth),
        height: Math.round(nextHeight),
      });

      if (handle.isLeft || handle.isTop) {
        let nextX = posX;
        let nextY = posY;

        if (handle.isLeft) {
          nextX = posX + (startWidth - nextWidth);
        }

        if (handle.isTop) {
          nextY = posY + (startHeight - nextHeight);
        }

        const clamped = clampToVisibleWithSize(
          nextX,
          nextY,
          nextWidth,
          nextHeight
        );

        if (handle.isLeft) {
          nextX = clamped.x;
        }

        if (handle.isTop) {
          nextY = clamped.y;
        }

        const nextPos = { x: nextX, y: nextY };
        setDragPosition(nextPos);
        dragPositionRef.current = nextPos;
      }
    },
    [MIN_HEIGHT, MIN_WIDTH, VISIBLE_MARGIN, clampToVisibleWithSize]
  );

  const endResize = useCallback(() => {
    setIsResizing(false);
    resizeStateRef.current = null;
    window.removeEventListener('mousemove', handleResizeMove);
    window.removeEventListener('mouseup', endResize);
    window.removeEventListener('touchmove', handleResizeMove);
    window.removeEventListener('touchend', endResize);
  }, [handleResizeMove]);

  const startResize = (event, handle) => {
    event.stopPropagation();
    event.preventDefault();

    const clientX =
      event.touches && event.touches.length
        ? event.touches[0].clientX
        : event.clientX;
    const clientY =
      event.touches && event.touches.length
        ? event.touches[0].clientY
        : event.clientY;

    const aspectRatio =
      videoSize.height > 0
        ? videoSize.width / videoSize.height
        : aspectRatioRef.current;
    aspectRatioRef.current = aspectRatio;
    const startPos = dragPositionRef.current ||
      initialPositionRef.current || { x: 0, y: 0 };

    resizeStateRef.current = {
      startX: clientX,
      startY: clientY,
      startWidth: videoSize.width,
      startHeight: videoSize.height,
      aspectRatio,
      startPos,
      handle,
    };

    setIsResizing(true);

    window.addEventListener('mousemove', handleResizeMove);
    window.addEventListener('mouseup', endResize);
    window.addEventListener('touchmove', handleResizeMove);
    window.addEventListener('touchend', endResize);
  };

  useEffect(() => {
    return () => {
      endResize();
    };
  }, [endResize]);

  useEffect(() => {
    dragPositionRef.current = dragPosition;
  }, [dragPosition]);

  // Initialize the floating window near bottom-right once
  useEffect(() => {
    if (initialPositionRef.current || typeof window === 'undefined') return;

    const totalHeight = videoSize.height + HEADER_HEIGHT + ERROR_HEIGHT;
    const initialX = Math.max(10, window.innerWidth - videoSize.width - 20);
    const initialY = Math.max(10, window.innerHeight - totalHeight - 20);
    const pos = clampToVisible(initialX, initialY);

    initialPositionRef.current = pos;
    setDragPosition(pos);
    dragPositionRef.current = pos;
  }, [
    clampToVisible,
    videoSize.height,
    videoSize.width,
    HEADER_HEIGHT,
    ERROR_HEIGHT,
  ]);

  const handleDragStart = useCallback(
    (event, data) => {
      const clientX = event.touches?.[0]?.clientX ?? event.clientX;
      const clientY = event.touches?.[0]?.clientY ?? event.clientY;
      const rect = videoContainerRef.current?.getBoundingClientRect();

      if (clientX != null && clientY != null && rect) {
        dragOffsetRef.current = {
          x: clientX - rect.left,
          y: clientY - rect.top,
        };
      } else {
        dragOffsetRef.current = { x: 0, y: 0 };
      }

      const clamped = clampToVisible(data?.x ?? 0, data?.y ?? 0);
      setDragPosition(clamped);
      dragPositionRef.current = clamped;
    },
    [clampToVisible]
  );

  const handleDrag = useCallback(
    (event) => {
      const clientX = event.touches?.[0]?.clientX ?? event.clientX;
      const clientY = event.touches?.[0]?.clientY ?? event.clientY;
      if (clientX == null || clientY == null) return;

      const nextX = clientX - (dragOffsetRef.current?.x ?? 0);
      const nextY = clientY - (dragOffsetRef.current?.y ?? 0);
      const clamped = clampToVisible(nextX, nextY);
      setDragPosition(clamped);
      dragPositionRef.current = clamped;
    },
    [clampToVisible]
  );

  const handleDragStop = useCallback(
    (_, data) => {
      const clamped = clampToVisible(data?.x ?? 0, data?.y ?? 0);
      setDragPosition(clamped);
      dragPositionRef.current = clamped;
    },
    [clampToVisible]
  );

  // If the floating video is hidden or no URL is selected, do not render
  if (!isVisible || !streamUrl) {
    return null;
  }

  return (
    <Draggable nodeRef={videoContainerRef}>
    <Draggable
      nodeRef={videoContainerRef}
      cancel=".floating-video-no-drag"
      disabled={isResizing}
      position={dragPosition || undefined}
      defaultPosition={initialPositionRef.current || { x: 0, y: 0 }}
      onStart={handleDragStart}
      onDrag={handleDrag}
      onStop={handleDragStop}
    >
      <div
        ref={videoContainerRef}
        style={{
          position: 'fixed',
          bottom: '20px',
          right: '20px',
          width: '320px',
          top: 0,
          left: 0,
          width: `${videoSize.width}px`,
          zIndex: 9999,
          backgroundColor: '#333',
          borderRadius: '8px',
          overflow: 'hidden',
          overflow: 'visible',
          boxShadow: '0 2px 10px rgba(0,0,0,0.7)',
        }}
      >
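The two clamp helpers guarantee that at least VISIBLE_MARGIN pixels of the window stay on screen. A worked sketch with assumed numbers (320x180 video, 1280x720 viewport):

// clamp(v, min, max) is the same Math.min/Math.max pattern used above.
const clamp = (v, min, max) => Math.min(Math.max(v, min), max);
const width = 320;
const totalHeight = 180 + 38 + 45; // video + HEADER_HEIGHT + ERROR_HEIGHT = 263
const margin = 48;
console.log(clamp(1500, -(width - margin), 1280 - width));            // 960: snapped back on-screen
console.log(clamp(-400, -(totalHeight - margin), 720 - totalHeight)); // -215: 48px still visible at the top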
@ -378,10 +760,12 @@ export default function FloatingVideo() {
        <video
          ref={videoRef}
          controls
          className="floating-video-no-drag"
          style={{
            width: '100%',
            height: '180px',
            height: `${videoSize.height}px`,
            backgroundColor: '#000',
            borderRadius: '0 0 8px 8px',
            // Better controls styling for VOD
            ...(contentType === 'vod' && {
              controlsList: 'nodownload',
@ -468,6 +852,21 @@ export default function FloatingVideo() {
          </Text>
        </Box>
      )}

      {/* Resize handles */}
      {resizeHandles.map((handle) => (
        <Box
          key={handle.id}
          className="floating-video-no-drag"
          onMouseDown={(event) => startResize(event, handle)}
          onTouchStart={(event) => startResize(event, handle)}
          style={{
            ...resizeHandleBaseStyle,
            ...handle.style,
            cursor: handle.cursor,
          }}
        />
      ))}
      </div>
    </Draggable>
  );
902
frontend/src/components/backups/BackupManager.jsx
Normal file

@ -0,0 +1,902 @@
import { useEffect, useMemo, useState } from 'react';
import {
  ActionIcon,
  Box,
  Button,
  FileInput,
  Flex,
  Group,
  Loader,
  Modal,
  NumberInput,
  Paper,
  Select,
  Stack,
  Switch,
  Text,
  TextInput,
  Tooltip,
} from '@mantine/core';
import {
  Download,
  RefreshCcw,
  RotateCcw,
  SquareMinus,
  SquarePlus,
  UploadCloud,
} from 'lucide-react';
import { notifications } from '@mantine/notifications';
import dayjs from 'dayjs';

import API from '../../api';
import ConfirmationDialog from '../ConfirmationDialog';
import useLocalStorage from '../../hooks/useLocalStorage';
import useWarningsStore from '../../store/warnings';
import { CustomTable, useTable } from '../tables/CustomTable';

const RowActions = ({ row, handleDownload, handleRestoreClick, handleDeleteClick, downloading }) => {
  return (
    <Flex gap={4} wrap="nowrap">
      <Tooltip label="Download">
        <ActionIcon
          variant="transparent"
          size="sm"
          color="blue.5"
          onClick={() => handleDownload(row.original.name)}
          loading={downloading === row.original.name}
          disabled={downloading !== null}
        >
          <Download size={18} />
        </ActionIcon>
      </Tooltip>
      <Tooltip label="Restore">
        <ActionIcon
          variant="transparent"
          size="sm"
          color="yellow.5"
          onClick={() => handleRestoreClick(row.original)}
        >
          <RotateCcw size={18} />
        </ActionIcon>
      </Tooltip>
      <Tooltip label="Delete">
        <ActionIcon
          variant="transparent"
          size="sm"
          color="red.9"
          onClick={() => handleDeleteClick(row.original)}
        >
          <SquareMinus size={18} />
        </ActionIcon>
      </Tooltip>
    </Flex>
  );
};
// Convert 24h time string to 12h format with period
function to12Hour(time24) {
  if (!time24) return { time: '12:00', period: 'AM' };
  const [hours, minutes] = time24.split(':').map(Number);
  const period = hours >= 12 ? 'PM' : 'AM';
  const hours12 = hours % 12 || 12;
  return {
    time: `${hours12}:${String(minutes).padStart(2, '0')}`,
    period,
  };
}

// Convert 12h time + period to 24h format
function to24Hour(time12, period) {
  if (!time12) return '00:00';
  const [hours, minutes] = time12.split(':').map(Number);
  let hours24 = hours;
  if (period === 'PM' && hours !== 12) {
    hours24 = hours + 12;
  } else if (period === 'AM' && hours === 12) {
    hours24 = 0;
  }
  return `${String(hours24).padStart(2, '0')}:${String(minutes).padStart(2, '0')}`;
}
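A quick round-trip through the two converters above:

console.log(to12Hour('15:05'));       // { time: '3:05', period: 'PM' }
console.log(to24Hour('3:05', 'PM'));  // '15:05'
console.log(to12Hour('00:30'));       // { time: '12:30', period: 'AM' }
console.log(to24Hour('12:30', 'AM')); // '00:30'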
// Get default timezone (same as Settings page)
function getDefaultTimeZone() {
  try {
    return Intl.DateTimeFormat().resolvedOptions().timeZone || 'UTC';
  } catch {
    return 'UTC';
  }
}
// Validate cron expression
function validateCronExpression(expression) {
  if (!expression || expression.trim() === '') {
    return { valid: false, error: 'Cron expression is required' };
  }

  const parts = expression.trim().split(/\s+/);
  if (parts.length !== 5) {
    return { valid: false, error: 'Cron expression must have exactly 5 parts: minute hour day month weekday' };
  }

  const [minute, hour, dayOfMonth, month, dayOfWeek] = parts;

  // Validate each part (allowing *, */N steps, ranges, and lists)
  // Supports: *, */2, 5, 1-5, 1-5/2, 1,3,5, etc.
  const cronPartRegex = /^(\*\/\d+|\*|\d+(-\d+)?(\/\d+)?(,\d+(-\d+)?(\/\d+)?)*)$/;

  if (!cronPartRegex.test(minute)) {
    return { valid: false, error: 'Invalid minute field (0-59, *, or cron syntax)' };
  }
  if (!cronPartRegex.test(hour)) {
    return { valid: false, error: 'Invalid hour field (0-23, *, or cron syntax)' };
  }
  if (!cronPartRegex.test(dayOfMonth)) {
    return { valid: false, error: 'Invalid day field (1-31, *, or cron syntax)' };
  }
  if (!cronPartRegex.test(month)) {
    return { valid: false, error: 'Invalid month field (1-12, *, or cron syntax)' };
  }
  if (!cronPartRegex.test(dayOfWeek)) {
    return { valid: false, error: 'Invalid weekday field (0-6, *, or cron syntax)' };
  }

  // Additional range validation for numeric values
  const validateRange = (value, min, max, name) => {
    // Skip if it's * or contains special characters
    if (value === '*' || value.includes('/') || value.includes('-') || value.includes(',')) {
      return null;
    }
    const num = parseInt(value, 10);
    if (isNaN(num) || num < min || num > max) {
      return `${name} must be between ${min} and ${max}`;
    }
    return null;
  };

  const minuteError = validateRange(minute, 0, 59, 'Minute');
  if (minuteError) return { valid: false, error: minuteError };

  const hourError = validateRange(hour, 0, 23, 'Hour');
  if (hourError) return { valid: false, error: hourError };

  const dayError = validateRange(dayOfMonth, 1, 31, 'Day');
  if (dayError) return { valid: false, error: dayError };

  const monthError = validateRange(month, 1, 12, 'Month');
  if (monthError) return { valid: false, error: monthError };

  const weekdayError = validateRange(dayOfWeek, 0, 6, 'Weekday');
  if (weekdayError) return { valid: false, error: weekdayError };

  return { valid: true, error: null };
}
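A few sample inputs against the validator, with results implied by the rules above:

console.log(validateCronExpression('0 3 * * *'));          // { valid: true, error: null }
console.log(validateCronExpression('*/15 2-6 * * 1,3,5')); // { valid: true, error: null }
console.log(validateCronExpression('0 3 * *'));            // invalid: only 4 parts
console.log(validateCronExpression('75 3 * * *'));         // invalid: minute out of range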
const DAYS_OF_WEEK = [
  { value: '0', label: 'Sunday' },
  { value: '1', label: 'Monday' },
  { value: '2', label: 'Tuesday' },
  { value: '3', label: 'Wednesday' },
  { value: '4', label: 'Thursday' },
  { value: '5', label: 'Friday' },
  { value: '6', label: 'Saturday' },
];

function formatBytes(bytes) {
  if (bytes === 0) return '0 B';
  const k = 1024;
  const sizes = ['B', 'KB', 'MB', 'GB'];
  const i = Math.floor(Math.log(bytes) / Math.log(k));
  return `${(bytes / Math.pow(k, i)).toFixed(2)} ${sizes[i]}`;
}
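Sample outputs from formatBytes:

console.log(formatBytes(0));        // '0 B'
console.log(formatBytes(1536));     // '1.50 KB'
console.log(formatBytes(10485760)); // '10.00 MB'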
export default function BackupManager() {
  const [backups, setBackups] = useState([]);
  const [loading, setLoading] = useState(false);
  const [creating, setCreating] = useState(false);
  const [downloading, setDownloading] = useState(null);
  const [uploadFile, setUploadFile] = useState(null);
  const [uploadModalOpen, setUploadModalOpen] = useState(false);
  const [restoreConfirmOpen, setRestoreConfirmOpen] = useState(false);
  const [deleteConfirmOpen, setDeleteConfirmOpen] = useState(false);
  const [selectedBackup, setSelectedBackup] = useState(null);

  // Read user's preferences from settings
  const [timeFormat] = useLocalStorage('time-format', '12h');
  const [dateFormatSetting] = useLocalStorage('date-format', 'mdy');
  const [tableSize] = useLocalStorage('table-size', 'default');
  const [userTimezone] = useLocalStorage('time-zone', getDefaultTimeZone());
  const is12Hour = timeFormat === '12h';

  // Format date according to user preferences
  const formatDate = (dateString) => {
    const date = dayjs(dateString);
    const datePart = dateFormatSetting === 'mdy' ? 'MM/DD/YYYY' : 'DD/MM/YYYY';
    const timePart = is12Hour ? 'h:mm:ss A' : 'HH:mm:ss';
    return date.format(`${datePart}, ${timePart}`);
  };

  // Warning suppression for confirmation dialogs
  const suppressWarning = useWarningsStore((s) => s.suppressWarning);

  // Schedule state
  const [schedule, setSchedule] = useState({
    enabled: false,
    frequency: 'daily',
    time: '03:00',
    day_of_week: 0,
    retention_count: 0,
    cron_expression: '',
  });
  const [scheduleLoading, setScheduleLoading] = useState(false);
  const [scheduleSaving, setScheduleSaving] = useState(false);
  const [scheduleChanged, setScheduleChanged] = useState(false);
  const [advancedMode, setAdvancedMode] = useState(false);
  const [cronError, setCronError] = useState(null);

  // For 12-hour display mode
  const [displayTime, setDisplayTime] = useState('3:00');
  const [timePeriod, setTimePeriod] = useState('AM');

  const columns = useMemo(
    () => [
      {
        header: 'Filename',
        accessorKey: 'name',
        grow: true,
        cell: ({ cell }) => (
          <div
            style={{
              whiteSpace: 'nowrap',
              overflow: 'hidden',
              textOverflow: 'ellipsis',
            }}
          >
            {cell.getValue()}
          </div>
        ),
      },
      {
        header: 'Size',
        accessorKey: 'size',
        size: 80,
        cell: ({ cell }) => (
          <Text size="sm">{formatBytes(cell.getValue())}</Text>
        ),
      },
      {
        header: 'Created',
        accessorKey: 'created',
        minSize: 180,
        cell: ({ cell }) => (
          <Text size="sm" style={{ whiteSpace: 'nowrap' }}>
            {formatDate(cell.getValue())}
          </Text>
        ),
      },
      {
        id: 'actions',
        header: 'Actions',
        size: tableSize === 'compact' ? 75 : 100,
      },
    ],
    [tableSize]
  );

  const renderHeaderCell = (header) => {
    return (
      <Text size="sm" name={header.id}>
        {header.column.columnDef.header}
      </Text>
    );
  };

  const renderBodyCell = ({ cell, row }) => {
    switch (cell.column.id) {
      case 'actions':
        return (
          <RowActions
            row={row}
            handleDownload={handleDownload}
            handleRestoreClick={handleRestoreClick}
            handleDeleteClick={handleDeleteClick}
            downloading={downloading}
          />
        );
    }
  };

  const table = useTable({
    columns,
    data: backups,
    allRowIds: backups.map((b) => b.name),
    bodyCellRenderFns: {
      actions: renderBodyCell,
    },
    headerCellRenderFns: {
      name: renderHeaderCell,
      size: renderHeaderCell,
      created: renderHeaderCell,
      actions: renderHeaderCell,
    },
  });

  const loadBackups = async () => {
    setLoading(true);
    try {
      const backupList = await API.listBackups();
      setBackups(backupList);
    } catch (error) {
      notifications.show({
        title: 'Error',
        message: error?.message || 'Failed to load backups',
        color: 'red',
      });
    } finally {
      setLoading(false);
    }
  };

  const loadSchedule = async () => {
    setScheduleLoading(true);
    try {
      const settings = await API.getBackupSchedule();

      // Check if using cron expression (advanced mode)
      if (settings.cron_expression) {
        setAdvancedMode(true);
      }

      setSchedule(settings);

      // Initialize 12-hour display values
      const { time, period } = to12Hour(settings.time);
      setDisplayTime(time);
      setTimePeriod(period);

      setScheduleChanged(false);
    } catch (error) {
      // Ignore errors on initial load - settings may not exist yet
    } finally {
      setScheduleLoading(false);
    }
  };

  useEffect(() => {
    loadBackups();
    loadSchedule();
  }, []);

  // Validate cron expression when switching to advanced mode
  useEffect(() => {
    if (advancedMode && schedule.cron_expression) {
      const validation = validateCronExpression(schedule.cron_expression);
      setCronError(validation.valid ? null : validation.error);
    } else {
      setCronError(null);
    }
  }, [advancedMode, schedule.cron_expression]);

  const handleScheduleChange = (field, value) => {
    setSchedule((prev) => ({ ...prev, [field]: value }));
    setScheduleChanged(true);

    // Validate cron expression if in advanced mode
    if (field === 'cron_expression' && advancedMode) {
      const validation = validateCronExpression(value);
      setCronError(validation.valid ? null : validation.error);
    }
  };

  // Handle time changes in 12-hour mode
  const handleTimeChange12h = (newTime, newPeriod) => {
    const time = newTime ?? displayTime;
    const period = newPeriod ?? timePeriod;
    setDisplayTime(time);
    setTimePeriod(period);
    // Convert to 24h and update schedule
    const time24 = to24Hour(time, period);
    handleScheduleChange('time', time24);
  };

  // Handle time changes in 24-hour mode
  const handleTimeChange24h = (value) => {
    handleScheduleChange('time', value);
    // Also update 12h display state in case user switches formats
    const { time, period } = to12Hour(value);
    setDisplayTime(time);
    setTimePeriod(period);
  };

  const handleSaveSchedule = async () => {
    setScheduleSaving(true);
    try {
      const scheduleToSave = advancedMode
        ? schedule
        : { ...schedule, cron_expression: '' };

      const updated = await API.updateBackupSchedule(scheduleToSave);
      setSchedule(updated);
      setScheduleChanged(false);

      notifications.show({
        title: 'Success',
        message: 'Backup schedule saved',
        color: 'green',
      });
    } catch (error) {
      notifications.show({
        title: 'Error',
        message: error?.message || 'Failed to save schedule',
        color: 'red',
      });
    } finally {
      setScheduleSaving(false);
    }
  };

  const handleCreateBackup = async () => {
    setCreating(true);
    try {
      await API.createBackup();
      notifications.show({
        title: 'Success',
        message: 'Backup created successfully',
        color: 'green',
      });
      await loadBackups();
    } catch (error) {
      notifications.show({
        title: 'Error',
        message: error?.message || 'Failed to create backup',
        color: 'red',
      });
    } finally {
      setCreating(false);
    }
  };

  const handleDownload = async (filename) => {
    setDownloading(filename);
    try {
      await API.downloadBackup(filename);
      notifications.show({
        title: 'Download Started',
        message: `Downloading ${filename}...`,
        color: 'blue',
      });
    } catch (error) {
      notifications.show({
        title: 'Error',
        message: error?.message || 'Failed to download backup',
        color: 'red',
      });
    } finally {
      setDownloading(null);
    }
  };

  const handleDeleteClick = (backup) => {
    setSelectedBackup(backup);
    setDeleteConfirmOpen(true);
  };

  const handleDeleteConfirm = async () => {
    try {
      await API.deleteBackup(selectedBackup.name);
      notifications.show({
        title: 'Success',
        message: 'Backup deleted successfully',
        color: 'green',
      });
      await loadBackups();
    } catch (error) {
      notifications.show({
        title: 'Error',
        message: error?.message || 'Failed to delete backup',
        color: 'red',
      });
    } finally {
      setDeleteConfirmOpen(false);
      setSelectedBackup(null);
    }
  };

  const handleRestoreClick = (backup) => {
    setSelectedBackup(backup);
    setRestoreConfirmOpen(true);
  };

  const handleRestoreConfirm = async () => {
    try {
      await API.restoreBackup(selectedBackup.name);
      notifications.show({
        title: 'Success',
        message: 'Backup restored successfully. You may need to refresh the page.',
        color: 'green',
      });
      setTimeout(() => window.location.reload(), 2000);
    } catch (error) {
      notifications.show({
        title: 'Error',
        message: error?.message || 'Failed to restore backup',
        color: 'red',
      });
    } finally {
      setRestoreConfirmOpen(false);
      setSelectedBackup(null);
    }
  };

  const handleUploadSubmit = async () => {
    if (!uploadFile) return;

    try {
      await API.uploadBackup(uploadFile);
      notifications.show({
        title: 'Success',
        message: 'Backup uploaded successfully',
        color: 'green',
      });
      setUploadModalOpen(false);
      setUploadFile(null);
      await loadBackups();
    } catch (error) {
      notifications.show({
        title: 'Error',
        message: error?.message || 'Failed to upload backup',
        color: 'red',
      });
    }
  };

  return (
    <Stack gap="md">
      {/* Schedule Settings */}
      <Stack gap="sm">
        <Group justify="space-between">
          <Text size="sm" fw={500}>Scheduled Backups</Text>
          <Switch
            checked={schedule.enabled}
            onChange={(e) => handleScheduleChange('enabled', e.currentTarget.checked)}
            label={schedule.enabled ? 'Enabled' : 'Disabled'}
          />
        </Group>

        <Group justify="space-between">
          <Text size="sm" fw={500}>Advanced (Cron Expression)</Text>
          <Switch
            checked={advancedMode}
            onChange={(e) => setAdvancedMode(e.currentTarget.checked)}
            label={advancedMode ? 'Enabled' : 'Disabled'}
            disabled={!schedule.enabled}
            size="sm"
          />
        </Group>

        {scheduleLoading ? (
          <Loader size="sm" />
        ) : (
          <>
            {advancedMode ? (
              <>
                <Stack gap="sm">
                  <TextInput
                    label="Cron Expression"
                    value={schedule.cron_expression}
                    onChange={(e) => handleScheduleChange('cron_expression', e.currentTarget.value)}
                    placeholder="0 3 * * *"
                    description="Format: minute hour day month weekday (e.g., '0 3 * * *' = 3:00 AM daily)"
                    disabled={!schedule.enabled}
                    error={cronError}
                  />
                  <Text size="xs" c="dimmed">
                    Examples: <br />
                    • <code>0 3 * * *</code> - Every day at 3:00 AM<br />
                    • <code>0 2 * * 0</code> - Every Sunday at 2:00 AM<br />
                    • <code>0 */6 * * *</code> - Every 6 hours<br />
                    • <code>30 14 1 * *</code> - 1st of every month at 2:30 PM
                  </Text>
                </Stack>
                <Group grow align="flex-end">
                  <NumberInput
                    label="Retention"
                    description="0 = keep all"
                    value={schedule.retention_count}
                    onChange={(value) => handleScheduleChange('retention_count', value || 0)}
                    min={0}
                    disabled={!schedule.enabled}
                  />
                  <Button
                    onClick={handleSaveSchedule}
                    loading={scheduleSaving}
                    disabled={!scheduleChanged || (advancedMode && cronError)}
                    variant="default"
                  >
                    Save
                  </Button>
                </Group>
              </>
            ) : (
              <Stack gap="sm">
                <Group align="flex-end" gap="xs" wrap="nowrap">
                  <Select
                    label="Frequency"
                    value={schedule.frequency}
                    onChange={(value) => handleScheduleChange('frequency', value)}
                    data={[
                      { value: 'daily', label: 'Daily' },
                      { value: 'weekly', label: 'Weekly' },
                    ]}
                    disabled={!schedule.enabled}
                  />
                  {schedule.frequency === 'weekly' && (
                    <Select
                      label="Day"
                      value={String(schedule.day_of_week)}
                      onChange={(value) => handleScheduleChange('day_of_week', parseInt(value, 10))}
                      data={DAYS_OF_WEEK}
                      disabled={!schedule.enabled}
                    />
                  )}
                  {is12Hour ? (
                    <>
                      <Select
                        label="Hour"
                        value={displayTime ? displayTime.split(':')[0] : '12'}
                        onChange={(value) => {
                          const minute = displayTime ? displayTime.split(':')[1] : '00';
                          handleTimeChange12h(`${value}:${minute}`, null);
                        }}
                        data={Array.from({ length: 12 }, (_, i) => ({
                          value: String(i + 1),
                          label: String(i + 1),
                        }))}
                        disabled={!schedule.enabled}
                        searchable
                      />
                      <Select
                        label="Minute"
                        value={displayTime ? displayTime.split(':')[1] : '00'}
                        onChange={(value) => {
                          const hour = displayTime ? displayTime.split(':')[0] : '12';
                          handleTimeChange12h(`${hour}:${value}`, null);
                        }}
                        data={Array.from({ length: 60 }, (_, i) => ({
                          value: String(i).padStart(2, '0'),
                          label: String(i).padStart(2, '0'),
                        }))}
                        disabled={!schedule.enabled}
                        searchable
                      />
                      <Select
                        label="Period"
                        value={timePeriod}
                        onChange={(value) => handleTimeChange12h(null, value)}
                        data={[
                          { value: 'AM', label: 'AM' },
                          { value: 'PM', label: 'PM' },
                        ]}
                        disabled={!schedule.enabled}
                      />
                    </>
                  ) : (
                    <>
                      <Select
                        label="Hour"
                        value={schedule.time ? schedule.time.split(':')[0] : '00'}
                        onChange={(value) => {
                          const minute = schedule.time ? schedule.time.split(':')[1] : '00';
                          handleTimeChange24h(`${value}:${minute}`);
                        }}
                        data={Array.from({ length: 24 }, (_, i) => ({
                          value: String(i).padStart(2, '0'),
                          label: String(i).padStart(2, '0'),
                        }))}
                        disabled={!schedule.enabled}
                        searchable
                      />
                      <Select
                        label="Minute"
                        value={schedule.time ? schedule.time.split(':')[1] : '00'}
                        onChange={(value) => {
                          const hour = schedule.time ? schedule.time.split(':')[0] : '00';
                          handleTimeChange24h(`${hour}:${value}`);
                        }}
                        data={Array.from({ length: 60 }, (_, i) => ({
                          value: String(i).padStart(2, '0'),
                          label: String(i).padStart(2, '0'),
                        }))}
                        disabled={!schedule.enabled}
                        searchable
                      />
                    </>
                  )}
                </Group>
                <Group grow align="flex-end" gap="xs">
                  <NumberInput
                    label="Retention"
                    description="0 = keep all"
                    value={schedule.retention_count}
                    onChange={(value) => handleScheduleChange('retention_count', value || 0)}
                    min={0}
                    disabled={!schedule.enabled}
                  />
                  <Button
                    onClick={handleSaveSchedule}
                    loading={scheduleSaving}
                    disabled={!scheduleChanged}
                    variant="default"
                  >
                    Save
                  </Button>
                </Group>
              </Stack>
            )}

            {/* Timezone info - only show in simple mode */}
            {!advancedMode && schedule.enabled && schedule.time && (
              <Text size="xs" c="dimmed" mt="xs">
                System Timezone: {userTimezone} • Backup will run at {schedule.time} {userTimezone}
              </Text>
            )}
          </>
        )}
      </Stack>

      {/* Backups List */}
      <Stack gap={0}>
        <Paper>
          <Box
            style={{
              display: 'flex',
              justifyContent: 'flex-end',
              padding: 10,
            }}
          >
            <Flex gap={6}>
              <Tooltip label="Upload existing backup">
                <Button
                  leftSection={<UploadCloud size={18} />}
                  variant="light"
                  size="xs"
                  onClick={() => setUploadModalOpen(true)}
                  p={5}
                >
                  Upload
                </Button>
              </Tooltip>
              <Tooltip label="Refresh list">
                <Button
                  leftSection={<RefreshCcw size={18} />}
                  variant="light"
                  size="xs"
                  onClick={loadBackups}
                  loading={loading}
                  p={5}
                >
                  Refresh
                </Button>
              </Tooltip>
              <Tooltip label="Create new backup">
                <Button
                  leftSection={<SquarePlus size={18} />}
                  variant="light"
                  size="xs"
                  onClick={handleCreateBackup}
                  loading={creating}
                  p={5}
                  color="green"
                  style={{
                    borderWidth: '1px',
                    borderColor: 'green',
                    color: 'white',
                  }}
                >
                  Create Backup
                </Button>
              </Tooltip>
            </Flex>
          </Box>
        </Paper>

        <Box
          style={{
            display: 'flex',
            flexDirection: 'column',
            maxHeight: 300,
            width: '100%',
            overflow: 'hidden',
          }}
        >
          <Box
            style={{
              flex: 1,
              overflowY: 'auto',
              overflowX: 'auto',
              border: 'solid 1px rgb(68,68,68)',
              borderRadius: 'var(--mantine-radius-default)',
            }}
          >
            {loading ? (
              <Box p="xl" style={{ display: 'flex', justifyContent: 'center' }}>
                <Loader />
              </Box>
            ) : backups.length === 0 ? (
              <Text size="sm" c="dimmed" p="md" ta="center">
                No backups found. Create one to get started.
              </Text>
            ) : (
              <div style={{ minWidth: 500 }}>
                <CustomTable table={table} />
              </div>
            )}
          </Box>
        </Box>
      </Stack>

      <Modal
        opened={uploadModalOpen}
        onClose={() => {
          setUploadModalOpen(false);
          setUploadFile(null);
        }}
        title="Upload Backup"
      >
        <Stack>
          <FileInput
            label="Select backup file"
            placeholder="Choose a .zip file"
            accept=".zip,application/zip,application/x-zip-compressed"
            value={uploadFile}
            onChange={setUploadFile}
          />
          <Group justify="flex-end">
            <Button
              variant="outline"
              onClick={() => {
                setUploadModalOpen(false);
                setUploadFile(null);
              }}
            >
              Cancel
            </Button>
            <Button onClick={handleUploadSubmit} disabled={!uploadFile} variant="default">
              Upload
            </Button>
          </Group>
        </Stack>
      </Modal>

      <ConfirmationDialog
        opened={restoreConfirmOpen}
        onClose={() => {
          setRestoreConfirmOpen(false);
          setSelectedBackup(null);
        }}
        onConfirm={handleRestoreConfirm}
        title="Restore Backup"
        message={`Are you sure you want to restore from "${selectedBackup?.name}"? This will replace all current data with the backup data. This action cannot be undone.`}
        confirmLabel="Restore"
        cancelLabel="Cancel"
        actionKey="restore-backup"
        onSuppressChange={suppressWarning}
      />

      <ConfirmationDialog
        opened={deleteConfirmOpen}
        onClose={() => {
          setDeleteConfirmOpen(false);
          setSelectedBackup(null);
        }}
        onConfirm={handleDeleteConfirm}
        title="Delete Backup"
        message={`Are you sure you want to delete "${selectedBackup?.name}"? This action cannot be undone.`}
        confirmLabel="Delete"
        cancelLabel="Cancel"
        actionKey="delete-backup"
        onSuppressChange={suppressWarning}
      />
    </Stack>
  );
}
@ -149,6 +149,9 @@ const RegexFormAndView = ({ profile = null, m3u, isOpen, onClose }) => {
      }

      resetForm();
      // Reset local state to sync with formik reset
      setSearchPattern('');
      setReplacePattern('');
      setSubmitting(false);
      onClose();
    },
@ -1,8 +1,19 @@
// frontend/src/components/forms/SuperuserForm.js
import React, { useState } from 'react';
import { TextInput, Center, Button, Paper, Title, Stack } from '@mantine/core';
import React, { useState, useEffect } from 'react';
import {
  TextInput,
  Center,
  Button,
  Paper,
  Title,
  Stack,
  Text,
  Image,
  Divider,
} from '@mantine/core';
import API from '../../api';
import useAuthStore from '../../store/auth';
import logo from '../../assets/logo.png';

function SuperuserForm() {
  const [formData, setFormData] = useState({
@ -11,8 +22,16 @@ function SuperuserForm() {
    email: '',
  });
  const [error, setError] = useState('');
  const [version, setVersion] = useState(null);
  const setSuperuserExists = useAuthStore((s) => s.setSuperuserExists);

  useEffect(() => {
    // Fetch version info
    API.getVersion().then((data) => {
      setVersion(data?.version);
    });
  }, []);

  const handleChange = (e) => {
    setFormData((prev) => ({
      ...prev,
@ -46,11 +65,29 @@ function SuperuserForm() {
    >
      <Paper
        elevation={3}
        style={{ padding: 30, width: '100%', maxWidth: 400 }}
        style={{
          padding: 30,
          width: '100%',
          maxWidth: 500,
          position: 'relative',
        }}
      >
        <Title order={4} align="center">
          Create your Super User Account
        </Title>
        <Stack align="center" spacing="lg">
          <Image
            src={logo}
            alt="Dispatcharr Logo"
            width={120}
            height={120}
            fit="contain"
          />
          <Title order={2} align="center">
            Dispatcharr
          </Title>
          <Text size="sm" color="dimmed" align="center">
            Welcome! Create your Super User Account to get started.
          </Text>
          <Divider style={{ width: '100%' }} />
        </Stack>
        <form onSubmit={handleSubmit}>
          <Stack>
            <TextInput
@ -77,11 +114,25 @@ function SuperuserForm() {
              onChange={handleChange}
            />

            <Button type="submit" size="sm" sx={{ pt: 1 }}>
              Submit
            <Button type="submit" fullWidth>
              Create Account
            </Button>
          </Stack>
        </form>

        {version && (
          <Text
            size="xs"
            color="dimmed"
            style={{
              position: 'absolute',
              bottom: 6,
              right: 30,
            }}
          >
            v{version}
          </Text>
        )}
      </Paper>
    </Center>
  );
@ -69,7 +69,7 @@ const epgUrlBase = `${window.location.protocol}//${window.location.host}/output/
const hdhrUrlBase = `${window.location.protocol}//${window.location.host}/hdhr`;

const ChannelEnabledSwitch = React.memo(
  ({ rowId, selectedProfileId, selectedTableIds }) => {
  ({ rowId, selectedProfileId, selectedTableIds, setSelectedTableIds }) => {
    // Directly extract the channels set once to avoid re-renders on every change.
    const isEnabled = useChannelsStore(
      useCallback(

@ -80,16 +80,20 @@ const ChannelEnabledSwitch = React.memo(
      )
    );

    const handleToggle = () => {
    const handleToggle = async () => {
      if (selectedTableIds.length > 1) {
        API.updateProfileChannels(
        await API.updateProfileChannels(
          selectedTableIds,
          selectedProfileId,
          !isEnabled
        );
      } else {
        API.updateProfileChannel(rowId, selectedProfileId, !isEnabled);
        await API.updateProfileChannel(rowId, selectedProfileId, !isEnabled);
      }

      setSelectedTableIds([]);

      return API.requeryChannels();
    };

    return (
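The point of making handleToggle async appears to be ordering: the requery must not race the update, or it can return stale enabled state. A sketch of the pattern (names reused from the diff above):

const toggleThenRefresh = async () => {
  await API.updateProfileChannel(rowId, selectedProfileId, !isEnabled); // persist first
  return API.requeryChannels(); // refetch only after the change is saved
};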
@ -291,6 +295,9 @@ const ChannelsTable = ({}) => {
    profiles[selectedProfileId]
  );
  const [hasFetchedData, setHasFetchedData] = useState(false);
  const [showDisabled, setShowDisabled] = useState(true);
  const [showOnlyStreamlessChannels, setShowOnlyStreamlessChannels] =
    useState(false);

  const [paginationString, setPaginationString] = useState('');
  const [filters, setFilters] = useState({

@ -375,6 +382,15 @@ const ChannelsTable = ({}) => {
    params.append('page', pagination.pageIndex + 1);
    params.append('page_size', pagination.pageSize);
    params.append('include_streams', 'true');
    if (selectedProfileId !== '0') {
      params.append('channel_profile_id', selectedProfileId);
    }
    if (showDisabled === true) {
      params.append('show_disabled', true);
    }
    if (showOnlyStreamlessChannels === true) {
      params.append('only_streamless', true);
    }

    // Apply sorting
    if (sorting.length > 0) {
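With both filters active, the resulting query string looks like this (endpoint path and values are illustrative):

const params = new URLSearchParams();
params.append('page', 1);
params.append('page_size', 50);
params.append('include_streams', 'true');
params.append('channel_profile_id', '2');
params.append('show_disabled', true);
params.append('only_streamless', true);
console.log(`/api/channels/?${params}`);
// -> /api/channels/?page=1&page_size=50&include_streams=true&channel_profile_id=2&show_disabled=true&only_streamless=true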
@ -410,7 +426,14 @@ const ChannelsTable = ({}) => {
      pageSize: pagination.pageSize,
    });
    setAllRowIds(ids);
  }, [pagination, sorting, debouncedFilters]);
  }, [
    pagination,
    sorting,
    debouncedFilters,
    showDisabled,
    selectedProfileId,
    showOnlyStreamlessChannels,
  ]);

  const stopPropagation = useCallback((e) => {
    e.stopPropagation();

@ -737,6 +760,7 @@ const ChannelsTable = ({}) => {
            rowId={row.original.id}
            selectedProfileId={selectedProfileId}
            selectedTableIds={table.getState().selectedTableIds}
            setSelectedTableIds={table.setSelectedTableIds}
          />
        );
      },

@ -1335,6 +1359,10 @@ const ChannelsTable = ({}) => {
        deleteChannels={deleteChannels}
        selectedTableIds={table.selectedTableIds}
        table={table}
        showDisabled={showDisabled}
        setShowDisabled={setShowDisabled}
        showOnlyStreamlessChannels={showOnlyStreamlessChannels}
        setShowOnlyStreamlessChannels={setShowOnlyStreamlessChannels}
      />

      {/* Table or ghost empty state inside Paper */}
@ -12,20 +12,22 @@ import {
  Text,
  TextInput,
  Tooltip,
  UnstyledButton,
  useMantineTheme,
} from '@mantine/core';
import {
  ArrowDown01,
  Binary,
  Check,
  CircleCheck,
  Ellipsis,
  EllipsisVertical,
  SquareMinus,
  SquarePen,
  SquarePlus,
  Settings,
  Eye,
  EyeOff,
  Filter,
  Square,
  SquareCheck,
} from 'lucide-react';
import API from '../../../api';
import { notifications } from '@mantine/notifications';

@ -102,6 +104,10 @@ const ChannelTableHeader = ({
  editChannel,
  deleteChannels,
  selectedTableIds,
  showDisabled,
  setShowDisabled,
  showOnlyStreamlessChannels,
  setShowOnlyStreamlessChannels,
}) => {
  const theme = useMantineTheme();

@ -208,6 +214,14 @@ const ChannelTableHeader = ({
    );
  };

  const toggleShowDisabled = () => {
    setShowDisabled(!showDisabled);
  };

  const toggleShowOnlyStreamlessChannels = () => {
    setShowOnlyStreamlessChannels(!showOnlyStreamlessChannels);
  };

  return (
    <Group justify="space-between">
      <Group gap={5} style={{ paddingLeft: 10 }}>

@ -236,6 +250,41 @@ const ChannelTableHeader = ({
        }}
      >
        <Flex gap={6}>
          <Menu shadow="md" width={200}>
            <Menu.Target>
              <Button size="xs" variant="default" onClick={() => {}}>
                <Filter size={18} />
              </Button>
            </Menu.Target>

            <Menu.Dropdown>
              <Menu.Item
                onClick={toggleShowDisabled}
                leftSection={
                  showDisabled ? <Eye size={18} /> : <EyeOff size={18} />
                }
                disabled={selectedProfileId === '0'}
              >
                <Text size="xs">
                  {showDisabled ? 'Hide Disabled' : 'Show Disabled'}
                </Text>
              </Menu.Item>

              <Menu.Item
                onClick={toggleShowOnlyStreamlessChannels}
                leftSection={
                  showOnlyStreamlessChannels ? (
                    <SquareCheck size={18} />
                  ) : (
                    <Square size={18} />
                  )
                }
              >
                <Text size="xs">Only Empty Channels</Text>
              </Menu.Item>
            </Menu.Dropdown>
          </Menu>

          <Button
            leftSection={<SquarePen size={18} />}
            variant="default"
@ -30,6 +30,7 @@ import { isNotEmpty, useForm } from '@mantine/form';
import { notifications } from '@mantine/notifications';
import UserAgentsTable from '../components/tables/UserAgentsTable';
import StreamProfilesTable from '../components/tables/StreamProfilesTable';
import BackupManager from '../components/backups/BackupManager';
import useLocalStorage from '../hooks/useLocalStorage';
import useAuthStore from '../store/auth';
import {

@ -1055,6 +1056,10 @@ const SettingsPage = () => {
                  value: 'm3u_id',
                  label: 'M3U ID',
                },
                {
                  value: 'group',
                  label: 'Group',
                },
              ]}
              {...form.getInputProps('m3u-hash-key')}
              key={form.key('m3u-hash-key')}

@ -1306,6 +1311,13 @@ const SettingsPage = () => {
            </form>
          </Accordion.Panel>
        </Accordion.Item>

        <Accordion.Item value="backups">
          <Accordion.Control>Backup & Restore</Accordion.Control>
          <Accordion.Panel>
            <BackupManager />
          </Accordion.Panel>
        </Accordion.Item>
      </>
    )}
  </Accordion>
@ -89,7 +89,7 @@ const getStartDate = (uptime) => {
};

// Create a VOD Card component similar to ChannelCard
const VODCard = ({ vodContent }) => {
const VODCard = ({ vodContent, stopVODClient }) => {
  const [dateFormatSetting] = useLocalStorage('date-format', 'mdy');
  const dateFormat = dateFormatSetting === 'mdy' ? 'MM/DD' : 'DD/MM';
  const [isClientExpanded, setIsClientExpanded] = useState(false);

@ -329,6 +329,19 @@ const VODCard = ({ vodContent }) => {
            </Center>
          </Tooltip>
        )}
        {connection && stopVODClient && (
          <Center>
            <Tooltip label="Stop VOD Connection">
              <ActionIcon
                variant="transparent"
                color="red.9"
                onClick={() => stopVODClient(connection.client_id)}
              >
                <SquareX size="24" />
              </ActionIcon>
            </Tooltip>
          </Center>
        )}
      </Group>
    </Group>

@ -1297,6 +1310,12 @@ const ChannelsPage = () => {
    await API.stopClient(channelId, clientId);
  };

  const stopVODClient = async (clientId) => {
    await API.stopVODClient(clientId);
    // Refresh VOD stats after stopping to update the UI
    fetchVODStats();
  };

  // Function to fetch channel stats from API
  const fetchChannelStats = useCallback(async () => {
    try {

@ -1585,7 +1604,11 @@ const ChannelsPage = () => {
        );
      } else if (connection.type === 'vod') {
        return (
          <VODCard key={connection.id} vodContent={connection.data} />
          <VODCard
            key={connection.id}
            vodContent={connection.data}
            stopVODClient={stopVODClient}
          />
        );
      }
      return null;
@ -1,32 +1,32 @@
Django==5.2.4
psycopg2-binary==2.9.10
celery[redis]==5.5.3
djangorestframework==3.16.0
requests==2.32.4
psutil==7.0.0
Django==5.2.9
psycopg2-binary==2.9.11
celery[redis]==5.6.0
djangorestframework==3.16.1
requests==2.32.5
psutil==7.1.3
pillow
drf-yasg>=1.20.0
drf-yasg>=1.21.11
streamlink
python-vlc
yt-dlp
gevent==25.5.1
gevent==25.9.1
daphne
uwsgi
django-cors-headers
djangorestframework-simplejwt
m3u8
rapidfuzz==3.13.0
rapidfuzz==3.14.3
regex # Required by transformers but also used for advanced regex features
tzlocal

# PyTorch dependencies (CPU only)
--extra-index-url https://download.pytorch.org/whl/cpu/
torch==2.7.1+cpu
torch==2.9.1+cpu

# ML/NLP dependencies
sentence-transformers==5.1.0
sentence-transformers==5.2.0
channels
channels-redis==4.3.0
django-filter
django-celery-beat
lxml==6.0.0
lxml==6.0.2