Mirror of https://github.com/Dispatcharr/Dispatcharr.git, synced 2026-01-23 02:35:14 +00:00

Merge branch 'dev' of https://github.com/Dispatcharr/Dispatcharr into pr/sethwv/757

Commit 31b9868bfd: 35 changed files with 1808 additions and 775 deletions
.github/workflows/base-image.yml (vendored, 87 changed lines)

@@ -101,6 +101,28 @@ jobs:
  username: ${{ secrets.DOCKERHUB_USERNAME }}
  password: ${{ secrets.DOCKERHUB_TOKEN }}

+ - name: Extract metadata for Docker
+ id: meta
+ uses: docker/metadata-action@v5
+ with:
+ images: |
+ ghcr.io/${{ needs.prepare.outputs.repo_owner }}/${{ needs.prepare.outputs.repo_name }}
+ docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${{ needs.prepare.outputs.repo_name }}
+ labels: |
+ org.opencontainers.image.title=${{ needs.prepare.outputs.repo_name }}
+ org.opencontainers.image.description=Your ultimate IPTV & stream Management companion.
+ org.opencontainers.image.url=https://github.com/${{ github.repository }}
+ org.opencontainers.image.source=https://github.com/${{ github.repository }}
+ org.opencontainers.image.version=${{ needs.prepare.outputs.branch_tag }}-${{ needs.prepare.outputs.timestamp }}
+ org.opencontainers.image.created=${{ needs.prepare.outputs.timestamp }}
+ org.opencontainers.image.revision=${{ github.sha }}
+ org.opencontainers.image.licenses=See repository
+ org.opencontainers.image.documentation=https://dispatcharr.github.io/Dispatcharr-Docs/
+ org.opencontainers.image.vendor=${{ needs.prepare.outputs.repo_owner }}
+ org.opencontainers.image.authors=${{ github.actor }}
+ maintainer=${{ github.actor }}
+ build_version=DispatcharrBase version: ${{ needs.prepare.outputs.branch_tag }}-${{ needs.prepare.outputs.timestamp }}

  - name: Build and push Docker base image
  uses: docker/build-push-action@v4
  with:

@@ -113,6 +135,7 @@ jobs:
  ghcr.io/${{ needs.prepare.outputs.repo_owner }}/${{ needs.prepare.outputs.repo_name }}:${{ needs.prepare.outputs.branch_tag }}-${{ needs.prepare.outputs.timestamp }}-${{ matrix.platform }}
  docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${{ needs.prepare.outputs.repo_name }}:${{ needs.prepare.outputs.branch_tag }}-${{ matrix.platform }}
  docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${{ needs.prepare.outputs.repo_name }}:${{ needs.prepare.outputs.branch_tag }}-${{ needs.prepare.outputs.timestamp }}-${{ matrix.platform }}
+ labels: ${{ steps.meta.outputs.labels }}
  build-args: |
  REPO_OWNER=${{ needs.prepare.outputs.repo_owner }}
  REPO_NAME=${{ needs.prepare.outputs.repo_name }}

@@ -154,18 +177,74 @@ jobs:
  # GitHub Container Registry manifests
  # branch tag (e.g. base or base-dev)
- docker buildx imagetools create --tag ghcr.io/${OWNER}/${REPO}:${BRANCH_TAG} \
+ docker buildx imagetools create \
+ --annotation "index:org.opencontainers.image.title=${{ needs.prepare.outputs.repo_name }}" \
+ --annotation "index:org.opencontainers.image.description=Your ultimate IPTV & stream Management companion." \
+ --annotation "index:org.opencontainers.image.url=https://github.com/${{ github.repository }}" \
+ --annotation "index:org.opencontainers.image.source=https://github.com/${{ github.repository }}" \
+ --annotation "index:org.opencontainers.image.version=${BRANCH_TAG}-${TIMESTAMP}" \
+ --annotation "index:org.opencontainers.image.created=${TIMESTAMP}" \
+ --annotation "index:org.opencontainers.image.revision=${{ github.sha }}" \
+ --annotation "index:org.opencontainers.image.licenses=See repository" \
+ --annotation "index:org.opencontainers.image.documentation=https://dispatcharr.github.io/Dispatcharr-Docs/" \
+ --annotation "index:org.opencontainers.image.vendor=${OWNER}" \
+ --annotation "index:org.opencontainers.image.authors=${{ github.actor }}" \
+ --annotation "index:maintainer=${{ github.actor }}" \
+ --annotation "index:build_version=DispatcharrBase version: ${BRANCH_TAG}-${TIMESTAMP}" \
+ --tag ghcr.io/${OWNER}/${REPO}:${BRANCH_TAG} \
  ghcr.io/${OWNER}/${REPO}:${BRANCH_TAG}-amd64 ghcr.io/${OWNER}/${REPO}:${BRANCH_TAG}-arm64

  # branch + timestamp tag
- docker buildx imagetools create --tag ghcr.io/${OWNER}/${REPO}:${BRANCH_TAG}-${TIMESTAMP} \
+ docker buildx imagetools create \
+ --annotation "index:org.opencontainers.image.title=${{ needs.prepare.outputs.repo_name }}" \
+ --annotation "index:org.opencontainers.image.description=Your ultimate IPTV & stream Management companion." \
+ --annotation "index:org.opencontainers.image.url=https://github.com/${{ github.repository }}" \
+ --annotation "index:org.opencontainers.image.source=https://github.com/${{ github.repository }}" \
+ --annotation "index:org.opencontainers.image.version=${BRANCH_TAG}-${TIMESTAMP}" \
+ --annotation "index:org.opencontainers.image.created=${TIMESTAMP}" \
+ --annotation "index:org.opencontainers.image.revision=${{ github.sha }}" \
+ --annotation "index:org.opencontainers.image.licenses=See repository" \
+ --annotation "index:org.opencontainers.image.documentation=https://dispatcharr.github.io/Dispatcharr-Docs/" \
+ --annotation "index:org.opencontainers.image.vendor=${OWNER}" \
+ --annotation "index:org.opencontainers.image.authors=${{ github.actor }}" \
+ --annotation "index:maintainer=${{ github.actor }}" \
+ --annotation "index:build_version=DispatcharrBase version: ${BRANCH_TAG}-${TIMESTAMP}" \
+ --tag ghcr.io/${OWNER}/${REPO}:${BRANCH_TAG}-${TIMESTAMP} \
  ghcr.io/${OWNER}/${REPO}:${BRANCH_TAG}-${TIMESTAMP}-amd64 ghcr.io/${OWNER}/${REPO}:${BRANCH_TAG}-${TIMESTAMP}-arm64

  # Docker Hub manifests
  # branch tag (e.g. base or base-dev)
- docker buildx imagetools create --tag docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${REPO}:${BRANCH_TAG} \
+ docker buildx imagetools create \
+ --annotation "index:org.opencontainers.image.title=${{ needs.prepare.outputs.repo_name }}" \
+ --annotation "index:org.opencontainers.image.description=Your ultimate IPTV & stream Management companion." \
+ --annotation "index:org.opencontainers.image.url=https://github.com/${{ github.repository }}" \
+ --annotation "index:org.opencontainers.image.source=https://github.com/${{ github.repository }}" \
+ --annotation "index:org.opencontainers.image.version=${BRANCH_TAG}-${TIMESTAMP}" \
+ --annotation "index:org.opencontainers.image.created=${TIMESTAMP}" \
+ --annotation "index:org.opencontainers.image.revision=${{ github.sha }}" \
+ --annotation "index:org.opencontainers.image.licenses=See repository" \
+ --annotation "index:org.opencontainers.image.documentation=https://dispatcharr.github.io/Dispatcharr-Docs/" \
+ --annotation "index:org.opencontainers.image.vendor=${OWNER}" \
+ --annotation "index:org.opencontainers.image.authors=${{ github.actor }}" \
+ --annotation "index:maintainer=${{ github.actor }}" \
+ --annotation "index:build_version=DispatcharrBase version: ${BRANCH_TAG}-${TIMESTAMP}" \
+ --tag docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${REPO}:${BRANCH_TAG} \
  docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${REPO}:${BRANCH_TAG}-amd64 docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${REPO}:${BRANCH_TAG}-arm64

  # branch + timestamp tag
- docker buildx imagetools create --tag docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${REPO}:${BRANCH_TAG}-${TIMESTAMP} \
+ docker buildx imagetools create \
+ --annotation "index:org.opencontainers.image.title=${{ needs.prepare.outputs.repo_name }}" \
+ --annotation "index:org.opencontainers.image.description=Your ultimate IPTV & stream Management companion." \
+ --annotation "index:org.opencontainers.image.url=https://github.com/${{ github.repository }}" \
+ --annotation "index:org.opencontainers.image.source=https://github.com/${{ github.repository }}" \
+ --annotation "index:org.opencontainers.image.version=${BRANCH_TAG}-${TIMESTAMP}" \
+ --annotation "index:org.opencontainers.image.created=${TIMESTAMP}" \
+ --annotation "index:org.opencontainers.image.revision=${{ github.sha }}" \
+ --annotation "index:org.opencontainers.image.licenses=See repository" \
+ --annotation "index:org.opencontainers.image.documentation=https://dispatcharr.github.io/Dispatcharr-Docs/" \
+ --annotation "index:org.opencontainers.image.vendor=${OWNER}" \
+ --annotation "index:org.opencontainers.image.authors=${{ github.actor }}" \
+ --annotation "index:maintainer=${{ github.actor }}" \
+ --annotation "index:build_version=DispatcharrBase version: ${BRANCH_TAG}-${TIMESTAMP}" \
+ --tag docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${REPO}:${BRANCH_TAG}-${TIMESTAMP} \
  docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${REPO}:${BRANCH_TAG}-${TIMESTAMP}-amd64 docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${REPO}:${BRANCH_TAG}-${TIMESTAMP}-arm64

.github/workflows/ci.yml (vendored, 87 changed lines)

@@ -119,7 +119,27 @@ jobs:
  username: ${{ secrets.DOCKERHUB_USERNAME }}
  password: ${{ secrets.DOCKERHUB_TOKEN }}

- # use metadata from the prepare job
+ - name: Extract metadata for Docker
+ id: meta
+ uses: docker/metadata-action@v5
+ with:
+ images: |
+ ghcr.io/${{ needs.prepare.outputs.repo_owner }}/${{ needs.prepare.outputs.repo_name }}
+ docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${{ needs.prepare.outputs.repo_name }}
+ labels: |
+ org.opencontainers.image.title=${{ needs.prepare.outputs.repo_name }}
+ org.opencontainers.image.description=Your ultimate IPTV & stream Management companion.
+ org.opencontainers.image.url=https://github.com/${{ github.repository }}
+ org.opencontainers.image.source=https://github.com/${{ github.repository }}
+ org.opencontainers.image.version=${{ needs.prepare.outputs.version }}-${{ needs.prepare.outputs.timestamp }}
+ org.opencontainers.image.created=${{ needs.prepare.outputs.timestamp }}
+ org.opencontainers.image.revision=${{ github.sha }}
+ org.opencontainers.image.licenses=See repository
+ org.opencontainers.image.documentation=https://dispatcharr.github.io/Dispatcharr-Docs/
+ org.opencontainers.image.vendor=${{ needs.prepare.outputs.repo_owner }}
+ org.opencontainers.image.authors=${{ github.actor }}
+ maintainer=${{ github.actor }}
+ build_version=Dispatcharr version: ${{ needs.prepare.outputs.version }}-${{ needs.prepare.outputs.timestamp }}

  - name: Build and push Docker image
  uses: docker/build-push-action@v4

@@ -137,6 +157,7 @@ jobs:
  ghcr.io/${{ needs.prepare.outputs.repo_owner }}/${{ needs.prepare.outputs.repo_name }}:${{ needs.prepare.outputs.version }}-${{ needs.prepare.outputs.timestamp }}-${{ matrix.platform }}
  docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${{ needs.prepare.outputs.repo_name }}:${{ needs.prepare.outputs.branch_tag }}-${{ matrix.platform }}
  docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${{ needs.prepare.outputs.repo_name }}:${{ needs.prepare.outputs.version }}-${{ needs.prepare.outputs.timestamp }}-${{ matrix.platform }}
+ labels: ${{ steps.meta.outputs.labels }}
  build-args: |
  REPO_OWNER=${{ needs.prepare.outputs.repo_owner }}
  REPO_NAME=${{ needs.prepare.outputs.repo_name }}

@@ -181,16 +202,72 @@ jobs:
  echo "Creating multi-arch manifest for ${OWNER}/${REPO}"

  # branch tag (e.g. latest or dev)
- docker buildx imagetools create --tag ghcr.io/${OWNER}/${REPO}:${BRANCH_TAG} \
+ docker buildx imagetools create \
+ --annotation "index:org.opencontainers.image.title=${{ needs.prepare.outputs.repo_name }}" \
+ --annotation "index:org.opencontainers.image.description=Your ultimate IPTV & stream Management companion." \
+ --annotation "index:org.opencontainers.image.url=https://github.com/${{ github.repository }}" \
+ --annotation "index:org.opencontainers.image.source=https://github.com/${{ github.repository }}" \
+ --annotation "index:org.opencontainers.image.version=${BRANCH_TAG}" \
+ --annotation "index:org.opencontainers.image.created=${TIMESTAMP}" \
+ --annotation "index:org.opencontainers.image.revision=${{ github.sha }}" \
+ --annotation "index:org.opencontainers.image.licenses=See repository" \
+ --annotation "index:org.opencontainers.image.documentation=https://dispatcharr.github.io/Dispatcharr-Docs/" \
+ --annotation "index:org.opencontainers.image.vendor=${OWNER}" \
+ --annotation "index:org.opencontainers.image.authors=${{ github.actor }}" \
+ --annotation "index:maintainer=${{ github.actor }}" \
+ --annotation "index:build_version=Dispatcharr version: ${VERSION}-${TIMESTAMP}" \
+ --tag ghcr.io/${OWNER}/${REPO}:${BRANCH_TAG} \
  ghcr.io/${OWNER}/${REPO}:${BRANCH_TAG}-amd64 ghcr.io/${OWNER}/${REPO}:${BRANCH_TAG}-arm64

  # version + timestamp tag
- docker buildx imagetools create --tag ghcr.io/${OWNER}/${REPO}:${VERSION}-${TIMESTAMP} \
+ docker buildx imagetools create \
+ --annotation "index:org.opencontainers.image.title=${{ needs.prepare.outputs.repo_name }}" \
+ --annotation "index:org.opencontainers.image.description=Your ultimate IPTV & stream Management companion." \
+ --annotation "index:org.opencontainers.image.url=https://github.com/${{ github.repository }}" \
+ --annotation "index:org.opencontainers.image.source=https://github.com/${{ github.repository }}" \
+ --annotation "index:org.opencontainers.image.version=${VERSION}-${TIMESTAMP}" \
+ --annotation "index:org.opencontainers.image.created=${TIMESTAMP}" \
+ --annotation "index:org.opencontainers.image.revision=${{ github.sha }}" \
+ --annotation "index:org.opencontainers.image.licenses=See repository" \
+ --annotation "index:org.opencontainers.image.documentation=https://dispatcharr.github.io/Dispatcharr-Docs/" \
+ --annotation "index:org.opencontainers.image.vendor=${OWNER}" \
+ --annotation "index:org.opencontainers.image.authors=${{ github.actor }}" \
+ --annotation "index:maintainer=${{ github.actor }}" \
+ --annotation "index:build_version=Dispatcharr version: ${VERSION}-${TIMESTAMP}" \
+ --tag ghcr.io/${OWNER}/${REPO}:${VERSION}-${TIMESTAMP} \
  ghcr.io/${OWNER}/${REPO}:${VERSION}-${TIMESTAMP}-amd64 ghcr.io/${OWNER}/${REPO}:${VERSION}-${TIMESTAMP}-arm64

  # also create Docker Hub manifests using the same username
- docker buildx imagetools create --tag docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${REPO}:${BRANCH_TAG} \
+ docker buildx imagetools create \
+ --annotation "index:org.opencontainers.image.title=${{ needs.prepare.outputs.repo_name }}" \
+ --annotation "index:org.opencontainers.image.description=Your ultimate IPTV & stream Management companion." \
+ --annotation "index:org.opencontainers.image.url=https://github.com/${{ github.repository }}" \
+ --annotation "index:org.opencontainers.image.source=https://github.com/${{ github.repository }}" \
+ --annotation "index:org.opencontainers.image.version=${BRANCH_TAG}" \
+ --annotation "index:org.opencontainers.image.created=${TIMESTAMP}" \
+ --annotation "index:org.opencontainers.image.revision=${{ github.sha }}" \
+ --annotation "index:org.opencontainers.image.licenses=See repository" \
+ --annotation "index:org.opencontainers.image.documentation=https://dispatcharr.github.io/Dispatcharr-Docs/" \
+ --annotation "index:org.opencontainers.image.vendor=${OWNER}" \
+ --annotation "index:org.opencontainers.image.authors=${{ github.actor }}" \
+ --annotation "index:maintainer=${{ github.actor }}" \
+ --annotation "index:build_version=Dispatcharr version: ${VERSION}-${TIMESTAMP}" \
+ --tag docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${REPO}:${BRANCH_TAG} \
  docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${REPO}:${BRANCH_TAG}-amd64 docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${REPO}:${BRANCH_TAG}-arm64

- docker buildx imagetools create --tag docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${REPO}:${VERSION}-${TIMESTAMP} \
+ docker buildx imagetools create \
+ --annotation "index:org.opencontainers.image.title=${{ needs.prepare.outputs.repo_name }}" \
+ --annotation "index:org.opencontainers.image.description=Your ultimate IPTV & stream Management companion." \
+ --annotation "index:org.opencontainers.image.url=https://github.com/${{ github.repository }}" \
+ --annotation "index:org.opencontainers.image.source=https://github.com/${{ github.repository }}" \
+ --annotation "index:org.opencontainers.image.version=${VERSION}-${TIMESTAMP}" \
+ --annotation "index:org.opencontainers.image.created=${TIMESTAMP}" \
+ --annotation "index:org.opencontainers.image.revision=${{ github.sha }}" \
+ --annotation "index:org.opencontainers.image.licenses=See repository" \
+ --annotation "index:org.opencontainers.image.documentation=https://dispatcharr.github.io/Dispatcharr-Docs/" \
+ --annotation "index:org.opencontainers.image.vendor=${OWNER}" \
+ --annotation "index:org.opencontainers.image.authors=${{ github.actor }}" \
+ --annotation "index:maintainer=${{ github.actor }}" \
+ --annotation "index:build_version=Dispatcharr version: ${VERSION}-${TIMESTAMP}" \
+ --tag docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${REPO}:${VERSION}-${TIMESTAMP} \
  docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${REPO}:${VERSION}-${TIMESTAMP}-amd64 docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${REPO}:${VERSION}-${TIMESTAMP}-arm64

.github/workflows/release.yml (vendored, 95 changed lines)

@@ -25,6 +25,7 @@ jobs:
  new_version: ${{ steps.update_version.outputs.new_version }}
  repo_owner: ${{ steps.meta.outputs.repo_owner }}
  repo_name: ${{ steps.meta.outputs.repo_name }}
+ timestamp: ${{ steps.timestamp.outputs.timestamp }}
  steps:
  - uses: actions/checkout@v3
  with:

@@ -56,6 +57,12 @@ jobs:
  REPO_NAME=$(echo "${{ github.repository }}" | cut -d '/' -f 2 | tr '[:upper:]' '[:lower:]')
  echo "repo_name=${REPO_NAME}" >> $GITHUB_OUTPUT

+ - name: Generate timestamp for build
+ id: timestamp
+ run: |
+ TIMESTAMP=$(date -u +'%Y%m%d%H%M%S')
+ echo "timestamp=${TIMESTAMP}" >> $GITHUB_OUTPUT

  - name: Commit and Tag
  run: |
  git add version.py CHANGELOG.md

@@ -104,6 +111,28 @@ jobs:
  username: ${{ secrets.DOCKERHUB_USERNAME }}
  password: ${{ secrets.DOCKERHUB_TOKEN }}

+ - name: Extract metadata for Docker
+ id: meta
+ uses: docker/metadata-action@v5
+ with:
+ images: |
+ ghcr.io/${{ needs.prepare.outputs.repo_owner }}/${{ needs.prepare.outputs.repo_name }}
+ docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${{ needs.prepare.outputs.repo_name }}
+ labels: |
+ org.opencontainers.image.title=${{ needs.prepare.outputs.repo_name }}
+ org.opencontainers.image.description=Your ultimate IPTV & stream Management companion.
+ org.opencontainers.image.url=https://github.com/${{ github.repository }}
+ org.opencontainers.image.source=https://github.com/${{ github.repository }}
+ org.opencontainers.image.version=${{ needs.prepare.outputs.new_version }}
+ org.opencontainers.image.created=${{ needs.prepare.outputs.timestamp }}
+ org.opencontainers.image.revision=${{ github.sha }}
+ org.opencontainers.image.licenses=See repository
+ org.opencontainers.image.documentation=https://dispatcharr.github.io/Dispatcharr-Docs/
+ org.opencontainers.image.vendor=${{ needs.prepare.outputs.repo_owner }}
+ org.opencontainers.image.authors=${{ github.actor }}
+ maintainer=${{ github.actor }}
+ build_version=Dispatcharr version: ${{ needs.prepare.outputs.new_version }} Build date: ${{ needs.prepare.outputs.timestamp }}

  - name: Build and push Docker image
  uses: docker/build-push-action@v4
  with:

@@ -115,6 +144,7 @@ jobs:
  ghcr.io/${{ needs.prepare.outputs.repo_owner }}/${{ needs.prepare.outputs.repo_name }}:${{ needs.prepare.outputs.new_version }}-${{ matrix.platform }}
  docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${{ needs.prepare.outputs.repo_name }}:latest-${{ matrix.platform }}
  docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${{ needs.prepare.outputs.repo_name }}:${{ needs.prepare.outputs.new_version }}-${{ matrix.platform }}
+ labels: ${{ steps.meta.outputs.labels }}
  build-args: |
  REPO_OWNER=${{ needs.prepare.outputs.repo_owner }}
  REPO_NAME=${{ needs.prepare.outputs.repo_name }}

@@ -149,25 +179,82 @@ jobs:
  OWNER=${{ needs.prepare.outputs.repo_owner }}
  REPO=${{ needs.prepare.outputs.repo_name }}
  VERSION=${{ needs.prepare.outputs.new_version }}
+ TIMESTAMP=${{ needs.prepare.outputs.timestamp }}

  echo "Creating multi-arch manifest for ${OWNER}/${REPO}"

  # GitHub Container Registry manifests
  # latest tag
- docker buildx imagetools create --tag ghcr.io/${OWNER}/${REPO}:latest \
+ docker buildx imagetools create \
+ --annotation "index:org.opencontainers.image.title=${{ needs.prepare.outputs.repo_name }}" \
+ --annotation "index:org.opencontainers.image.description=Your ultimate IPTV & stream Management companion." \
+ --annotation "index:org.opencontainers.image.url=https://github.com/${{ github.repository }}" \
+ --annotation "index:org.opencontainers.image.source=https://github.com/${{ github.repository }}" \
+ --annotation "index:org.opencontainers.image.version=latest" \
+ --annotation "index:org.opencontainers.image.created=${TIMESTAMP}" \
+ --annotation "index:org.opencontainers.image.revision=${{ github.sha }}" \
+ --annotation "index:org.opencontainers.image.licenses=See repository" \
+ --annotation "index:org.opencontainers.image.documentation=https://dispatcharr.github.io/Dispatcharr-Docs/" \
+ --annotation "index:org.opencontainers.image.vendor=${OWNER}" \
+ --annotation "index:org.opencontainers.image.authors=${{ github.actor }}" \
+ --annotation "index:maintainer=${{ github.actor }}" \
+ --annotation "index:build_version=Dispatcharr version: ${VERSION} Build date: ${TIMESTAMP}" \
+ --tag ghcr.io/${OWNER}/${REPO}:latest \
  ghcr.io/${OWNER}/${REPO}:latest-amd64 ghcr.io/${OWNER}/${REPO}:latest-arm64

  # version tag
- docker buildx imagetools create --tag ghcr.io/${OWNER}/${REPO}:${VERSION} \
+ docker buildx imagetools create \
+ --annotation "index:org.opencontainers.image.title=${{ needs.prepare.outputs.repo_name }}" \
+ --annotation "index:org.opencontainers.image.description=Your ultimate IPTV & stream Management companion." \
+ --annotation "index:org.opencontainers.image.url=https://github.com/${{ github.repository }}" \
+ --annotation "index:org.opencontainers.image.source=https://github.com/${{ github.repository }}" \
+ --annotation "index:org.opencontainers.image.version=${VERSION}" \
+ --annotation "index:org.opencontainers.image.created=${TIMESTAMP}" \
+ --annotation "index:org.opencontainers.image.revision=${{ github.sha }}" \
+ --annotation "index:org.opencontainers.image.licenses=See repository" \
+ --annotation "index:org.opencontainers.image.documentation=https://dispatcharr.github.io/Dispatcharr-Docs/" \
+ --annotation "index:org.opencontainers.image.vendor=${OWNER}" \
+ --annotation "index:org.opencontainers.image.authors=${{ github.actor }}" \
+ --annotation "index:maintainer=${{ github.actor }}" \
+ --annotation "index:build_version=Dispatcharr version: ${VERSION} Build date: ${TIMESTAMP}" \
+ --tag ghcr.io/${OWNER}/${REPO}:${VERSION} \
  ghcr.io/${OWNER}/${REPO}:${VERSION}-amd64 ghcr.io/${OWNER}/${REPO}:${VERSION}-arm64

  # Docker Hub manifests
  # latest tag
- docker buildx imagetools create --tag docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${REPO}:latest \
+ docker buildx imagetools create \
+ --annotation "index:org.opencontainers.image.title=${{ needs.prepare.outputs.repo_name }}" \
+ --annotation "index:org.opencontainers.image.description=Your ultimate IPTV & stream Management companion." \
+ --annotation "index:org.opencontainers.image.url=https://github.com/${{ github.repository }}" \
+ --annotation "index:org.opencontainers.image.source=https://github.com/${{ github.repository }}" \
+ --annotation "index:org.opencontainers.image.version=latest" \
+ --annotation "index:org.opencontainers.image.created=${TIMESTAMP}" \
+ --annotation "index:org.opencontainers.image.revision=${{ github.sha }}" \
+ --annotation "index:org.opencontainers.image.licenses=See repository" \
+ --annotation "index:org.opencontainers.image.documentation=https://dispatcharr.github.io/Dispatcharr-Docs/" \
+ --annotation "index:org.opencontainers.image.vendor=${OWNER}" \
+ --annotation "index:org.opencontainers.image.authors=${{ github.actor }}" \
+ --annotation "index:maintainer=${{ github.actor }}" \
+ --annotation "index:build_version=Dispatcharr version: ${VERSION} Build date: ${TIMESTAMP}" \
+ --tag docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${REPO}:latest \
  docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${REPO}:latest-amd64 docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${REPO}:latest-arm64

  # version tag
- docker buildx imagetools create --tag docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${REPO}:${VERSION} \
+ docker buildx imagetools create \
+ --annotation "index:org.opencontainers.image.title=${{ needs.prepare.outputs.repo_name }}" \
+ --annotation "index:org.opencontainers.image.description=Your ultimate IPTV & stream Management companion." \
+ --annotation "index:org.opencontainers.image.url=https://github.com/${{ github.repository }}" \
+ --annotation "index:org.opencontainers.image.source=https://github.com/${{ github.repository }}" \
+ --annotation "index:org.opencontainers.image.version=${VERSION}" \
+ --annotation "index:org.opencontainers.image.created=${TIMESTAMP}" \
+ --annotation "index:org.opencontainers.image.revision=${{ github.sha }}" \
+ --annotation "index:org.opencontainers.image.licenses=See repository" \
+ --annotation "index:org.opencontainers.image.documentation=https://dispatcharr.github.io/Dispatcharr-Docs/" \
+ --annotation "index:org.opencontainers.image.vendor=${OWNER}" \
+ --annotation "index:org.opencontainers.image.authors=${{ github.actor }}" \
+ --annotation "index:maintainer=${{ github.actor }}" \
+ --annotation "index:build_version=Dispatcharr version: ${VERSION} Build date: ${TIMESTAMP}" \
+ --tag docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${REPO}:${VERSION} \
  docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${REPO}:${VERSION}-amd64 docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${REPO}:${VERSION}-arm64

  create-release:

CHANGELOG.md (46 changed lines)

@@ -9,17 +9,57 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
  ### Added

- - Automated configuration backup/restore system with scheduled backups, retention policies, and async task processing - Thanks [@stlalpha](https://github.com/stlalpha) (Closes #153)
+ - Advanced filtering for Channels table: Filter menu now allows toggling disabled channels visibility (when a profile is selected) and filtering to show only empty channels without streams (Closes #182)
+ - Network Access warning modal now displays the client's IP address for better transparency when network restrictions are being enforced - Thanks [@damien-alt-sudo](https://github.com/damien-alt-sudo) (Closes #778)

  ### Changed

- - Removed unreachable code path in m3u output - Thanks [@DawtCom](https://github.com/DawtCom)
+ - Fixed event viewer arrow direction (previously inverted) — UI behavior corrected. Thanks [@drnikcuk](https://github.com/drnikcuk) (Closes #772)

  ### Fixed

- - nginx now gracefully handles hosts without IPv6 support by automatically disabling IPv6 binding at startup (Closes #744)
+ - Stream validation now returns original URL instead of redirected URL to prevent issues with temporary redirect URLs that expire before clients can connect

+ ## [0.15.1] - 2025-12-22

+ ### Fixed

+ - XtreamCodes EPG `has_archive` field now returns integer `0` instead of string `"0"` for proper JSON type consistency
+ - nginx now gracefully handles hosts without IPv6 support by automatically disabling IPv6 binding at startup (Fixes #744)

+ ## [0.15.0] - 2025-12-20

+ ### Added

+ - VOD client stop button in Stats page: Users can now disconnect individual VOD clients from the Stats view, similar to the existing channel client disconnect functionality.
+ - Automated configuration backup/restore system with scheduled backups, retention policies, and async task processing - Thanks [@stlalpha](https://github.com/stlalpha) (Closes #153)
+ - Stream group as available hash option: Users can now select 'Group' as a hash key option in Settings → Stream Settings → M3U Hash Key, allowing streams to be differentiated by their group membership in addition to name, URL, TVG-ID, and M3U ID

+ ### Changed

+ - Initial super user creation page now matches the login page design with logo, welcome message, divider, and version display for a more consistent and polished first-time setup experience
+ - Removed unreachable code path in m3u output - Thanks [@DawtCom](https://github.com/DawtCom)
+ - GitHub Actions workflows now use `docker/metadata-action` for cleaner and more maintainable OCI-compliant image label generation across all build pipelines (ci.yml, base-image.yml, release.yml). Labels are applied to both platform-specific images and multi-arch manifests with proper annotation formatting. - Thanks [@mrdynamo](https://github.com/mrdynamo) (Closes #724)
+ - Update docker/dev-build.sh to support private registries, multiple architectures and pushing. Now you can do things like `dev-build.sh -p -r my.private.registry -a linux/arm64,linux/amd64` - Thanks [@jdblack](https://github.com/jblack)
+ - Updated dependencies: Django (5.2.4 → 5.2.9) includes CVE security patch, psycopg2-binary (2.9.10 → 2.9.11), celery (5.5.3 → 5.6.0), djangorestframework (3.16.0 → 3.16.1), requests (2.32.4 → 2.32.5), psutil (7.0.0 → 7.1.3), gevent (25.5.1 → 25.9.1), rapidfuzz (3.13.0 → 3.14.3), torch (2.7.1 → 2.9.1), sentence-transformers (5.1.0 → 5.2.0), lxml (6.0.0 → 6.0.2) (Closes #662)
+ - Frontend dependencies updated: Vite (6.2.0 → 7.1.7), ESLint (9.21.0 → 9.27.0), and related packages; added npm `overrides` to enforce js-yaml@^4.1.1 for transitive security fix. All 6 reported vulnerabilities resolved with `npm audit fix`.
+ - Floating video player now supports resizing via drag handles, with minimum size enforcement and viewport/page boundary constraints to keep it visible.
+ - Redis connection settings now fully configurable via environment variables (`REDIS_HOST`, `REDIS_PORT`, `REDIS_DB`, `REDIS_URL`), replacing hardcoded `localhost:6379` values throughout the codebase. This enables use of external Redis services in production deployments. (Closes #762)
+ - Celery broker and result backend URLs now respect `REDIS_HOST`/`REDIS_PORT`/`REDIS_DB` settings as defaults, with `CELERY_BROKER_URL` and `CELERY_RESULT_BACKEND` environment variables available for override.

+ ### Fixed

+ - Docker init script now validates DISPATCHARR_PORT is an integer before using it, preventing sed errors when Kubernetes sets it to a service URL like `tcp://10.98.37.10:80`. Falls back to default port 9191 when invalid (Fixes #737)
+ - M3U Profile form now properly resets local state for search and replace patterns after saving, preventing validation errors when adding multiple profiles in a row
+ - DVR series rule deletion now properly handles TVG IDs that contain slashes by encoding them in the URL path (Fixes #697)
+ - VOD episode processing now correctly handles duplicate episodes (same episode in multiple languages/qualities) by reusing Episode records across multiple M3UEpisodeRelation entries instead of attempting to create duplicates (Fixes #556)
+ - XtreamCodes series streaming endpoint now correctly handles episodes with multiple streams (different languages/qualities) by selecting the best available stream based on account priority (Fixes #569)
+ - XtreamCodes series info API now returns unique episodes instead of duplicate entries when multiple streams exist for the same episode (different languages/qualities)
+ - nginx now gracefully handles hosts without IPv6 support by automatically disabling IPv6 binding at startup (Fixes #744)
  - XtreamCodes EPG API now returns correct date/time format for start/end fields and proper string types for timestamps and channel_id
  - XtreamCodes EPG API now handles None values for title and description fields to prevent AttributeError
+ - XtreamCodes EPG `id` field now provides unique identifiers per program listing instead of always returning "0" for better client EPG handling
+ - XtreamCodes EPG `epg_id` field now correctly returns the EPGData record ID (representing the EPG source/channel mapping) instead of a dummy value

  ## [0.14.0] - 2025-12-09

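The two Redis entries in the 0.15.0 "Changed" list describe environment-driven connection defaults without showing where they land. A minimal sketch of how such defaults could be wired in a Django settings module follows; the variable names come from the changelog entries, while the fallback values and URL composition are illustrative assumptions, not Dispatcharr's actual settings file.

    # Illustrative sketch only: names from the changelog entries above,
    # composition and defaults are assumptions.
    import os

    REDIS_HOST = os.environ.get("REDIS_HOST", "localhost")
    REDIS_PORT = int(os.environ.get("REDIS_PORT", "6379"))
    REDIS_DB = int(os.environ.get("REDIS_DB", "0"))

    # REDIS_URL can be given directly, otherwise composed from the parts above.
    REDIS_URL = os.environ.get(
        "REDIS_URL", f"redis://{REDIS_HOST}:{REDIS_PORT}/{REDIS_DB}"
    )

    # Celery falls back to the same Redis location unless explicitly overridden.
    CELERY_BROKER_URL = os.environ.get("CELERY_BROKER_URL", REDIS_URL)
    CELERY_RESULT_BACKEND = os.environ.get("CELERY_RESULT_BACKEND", REDIS_URL)
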
@@ -19,11 +19,11 @@ SETTING_KEYS = {
  }

  DEFAULTS = {
- "enabled": False,
+ "enabled": True,
  "frequency": "daily",
  "time": "03:00",
  "day_of_week": 0, # Sunday
- "retention_count": 0,
+ "retention_count": 3,
  "cron_expression": "",
  }

@@ -47,7 +47,7 @@ urlpatterns = [
  path('series-rules/', SeriesRulesAPIView.as_view(), name='series_rules'),
  path('series-rules/evaluate/', EvaluateSeriesRulesAPIView.as_view(), name='evaluate_series_rules'),
  path('series-rules/bulk-remove/', BulkRemoveSeriesRecordingsAPIView.as_view(), name='bulk_remove_series_recordings'),
- path('series-rules/<str:tvg_id>/', DeleteSeriesRuleAPIView.as_view(), name='delete_series_rule'),
+ path('series-rules/<path:tvg_id>/', DeleteSeriesRuleAPIView.as_view(), name='delete_series_rule'),
  path('recordings/bulk-delete-upcoming/', BulkDeleteUpcomingRecordingsAPIView.as_view(), name='bulk_delete_upcoming_recordings'),
  path('dvr/comskip-config/', ComskipConfigAPIView.as_view(), name='comskip_config'),
  ]

@@ -10,6 +10,7 @@ from django.shortcuts import get_object_or_404, get_list_or_404
  from django.db import transaction
  from django.db.models import Q
  import os, json, requests, logging
+ from urllib.parse import unquote
  from apps.accounts.permissions import (
  Authenticated,
  IsAdmin,

@@ -2053,7 +2054,7 @@ class DeleteSeriesRuleAPIView(APIView):
  return [Authenticated()]

  def delete(self, request, tvg_id):
- tvg_id = str(tvg_id)
+ tvg_id = unquote(str(tvg_id or ""))
  rules = [r for r in CoreSettings.get_dvr_series_rules() if str(r.get("tvg_id")) != tvg_id]
  CoreSettings.set_dvr_series_rules(rules)
  return Response({"success": True, "rules": rules})

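To see why the route above switches from `<str:tvg_id>` to `<path:tvg_id>` and why the view now unquotes: Django's `str` converter stops at `/`, so an ID such as `us/news.channel.1` would never reach the view, whereas the `path` converter accepts slashes and `unquote` restores any percent-encoding the client applied. A small standalone sketch of that round trip (the URL prefix is illustrative, not the project's exact route):

    from urllib.parse import quote, unquote

    tvg_id = "us/news.channel.1"

    # A client deleting the rule percent-encodes the ID before placing it in the path.
    encoded = quote(tvg_id, safe="")          # 'us%2Fnews.channel.1'
    url = f"/series-rules/{encoded}/"

    # On the server side, unquote() undoes the encoding and the `or ""` guard
    # keeps a missing value from breaking str().
    restored = unquote(str(encoded or ""))
    assert restored == tvg_id
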
@@ -119,11 +119,11 @@ class Stream(models.Model):
  return self.name or self.url or f"Stream ID {self.id}"

  @classmethod
- def generate_hash_key(cls, name, url, tvg_id, keys=None, m3u_id=None):
+ def generate_hash_key(cls, name, url, tvg_id, keys=None, m3u_id=None, group=None):
  if keys is None:
  keys = CoreSettings.get_m3u_hash_key().split(",")

- stream_parts = {"name": name, "url": url, "tvg_id": tvg_id, "m3u_id": m3u_id}
+ stream_parts = {"name": name, "url": url, "tvg_id": tvg_id, "m3u_id": m3u_id, "group": group}

  hash_parts = {key: stream_parts[key] for key in keys if key in stream_parts}

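The hunk above only shows how the candidate parts are assembled and filtered by the configured keys; the reduction of those parts to a digest is outside this hunk. A hedged sketch of the overall idea, with the final hashing step assumed (joining the selected values and taking an MD5) rather than taken from the source:

    import hashlib

    def generate_hash_key(name, url, tvg_id, keys, m3u_id=None, group=None):
        # Mirrors the visible part of the diff: collect candidate parts, then keep
        # only the keys selected in Settings -> Stream Settings -> M3U Hash Key.
        stream_parts = {"name": name, "url": url, "tvg_id": tvg_id,
                        "m3u_id": m3u_id, "group": group}
        hash_parts = {key: stream_parts[key] for key in keys if key in stream_parts}

        # Assumption: the digest step is not shown in this hunk; joining the
        # selected values and hashing them is one plausible shape.
        joined = "|".join(str(hash_parts[k]) for k in sorted(hash_parts))
        return hashlib.md5(joined.encode()).hexdigest()

    # With "group" in the key list, two otherwise identical streams in different
    # groups now produce different hashes instead of colliding.
    keys = ["name", "url", "group"]
    a = generate_hash_key("CNN", "http://example/1.ts", "cnn.us", keys, group="News")
    b = generate_hash_key("CNN", "http://example/1.ts", "cnn.us", keys, group="Sports")
    assert a != b
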
@@ -792,7 +792,7 @@ def process_xc_category_direct(account_id, batch, groups, hash_keys):
  group_title = group_name

  stream_hash = Stream.generate_hash_key(
- name, url, tvg_id, hash_keys, m3u_id=account_id
+ name, url, tvg_id, hash_keys, m3u_id=account_id, group=group_title
  )
  stream_props = {
  "name": name,

@@ -966,7 +966,7 @@ def process_m3u_batch_direct(account_id, batch, groups, hash_keys):
  )
  continue

- stream_hash = Stream.generate_hash_key(name, url, tvg_id, hash_keys, m3u_id=account_id)
+ stream_hash = Stream.generate_hash_key(name, url, tvg_id, hash_keys, m3u_id=account_id, group=group_title)
  stream_props = {
  "name": name,
  "url": url,

@@ -2292,17 +2292,27 @@ def xc_get_epg(request, user, short=False):
  output = {"epg_listings": []}

  for program in programs:
- id = "0"
- epg_id = "0"
  title = program['title'] if isinstance(program, dict) else program.title
  description = program['description'] if isinstance(program, dict) else program.description

  start = program["start_time"] if isinstance(program, dict) else program.start_time
  end = program["end_time"] if isinstance(program, dict) else program.end_time

+ # For database programs, use actual ID; for generated dummy programs, create synthetic ID
+ if isinstance(program, dict):
+ # Generated dummy program - create unique ID from channel + timestamp
+ program_id = str(abs(hash(f"{channel_id}_{int(start.timestamp())}")))
+ else:
+ # Database program - use actual ID
+ program_id = str(program.id)

+ # epg_id refers to the EPG source/channel mapping in XC panels
+ # Use the actual EPGData ID when available, otherwise fall back to 0
+ epg_id = str(channel.epg_data.id) if channel.epg_data else "0"

  program_output = {
- "id": f"{id}",
- "epg_id": f"{epg_id}",
+ "id": program_id,
+ "epg_id": epg_id,
  "title": base64.b64encode((title or "").encode()).decode(),
  "lang": "",
  "start": start.strftime("%Y-%m-%d %H:%M:%S"),

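To make the new ID handling concrete, here is a tiny standalone computation of the two moving parts visible in the hunk: the synthetic ID for a generated dummy programme and the base64 title encoding used in the listing. The values are made up for illustration only.

    import base64
    from datetime import datetime, timezone

    channel_id = 42
    start = datetime(2025, 12, 22, 20, 0, tzinfo=timezone.utc)

    # Dummy (generated) programme: ID derived from channel + start timestamp.
    # Note that hash() of a str is per-process salted, so this value is unique
    # within a response rather than reproducible across runs.
    program_id = str(abs(hash(f"{channel_id}_{int(start.timestamp())}")))

    # A database programme would instead use str(program.id), and epg_id comes
    # from channel.epg_data.id when an EPG source is mapped, else "0".
    title = "Evening News"
    listing = {
        "id": program_id,
        "epg_id": "7",  # illustrative EPGData primary key
        "title": base64.b64encode(title.encode()).decode(),
        "lang": "",
        "start": start.strftime("%Y-%m-%d %H:%M:%S"),
    }
    print(listing)
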
@@ -2521,34 +2531,45 @@ def xc_get_series_info(request, user, series_id):
  except Exception as e:
  logger.error(f"Error refreshing series data for relation {series_relation.id}: {str(e)}")

- # Get episodes for this series from the same M3U account
- episode_relations = M3UEpisodeRelation.objects.filter(
- episode__series=series,
- m3u_account=series_relation.m3u_account
- ).select_related('episode').order_by('episode__season_number', 'episode__episode_number')
+ # Get unique episodes for this series that have relations from any active M3U account
+ # We query episodes directly to avoid duplicates when multiple relations exist
+ # (e.g., same episode in different languages/qualities)
+ from apps.vod.models import Episode
+ episodes = Episode.objects.filter(
+ series=series,
+ m3u_relations__m3u_account__is_active=True
+ ).distinct().order_by('season_number', 'episode_number')

  # Group episodes by season
  seasons = {}
- for relation in episode_relations:
- episode = relation.episode
+ for episode in episodes:
  season_num = episode.season_number or 1
  if season_num not in seasons:
  seasons[season_num] = []

- # Try to get the highest priority related M3UEpisodeRelation for this episode (for video/audio/bitrate)
+ # Get the highest priority relation for this episode (for container_extension, video/audio/bitrate)
  from apps.vod.models import M3UEpisodeRelation
- first_relation = M3UEpisodeRelation.objects.filter(
- episode=episode
+ best_relation = M3UEpisodeRelation.objects.filter(
+ episode=episode,
+ m3u_account__is_active=True
  ).select_related('m3u_account').order_by('-m3u_account__priority', 'id').first()

  video = audio = bitrate = None
- if first_relation and first_relation.custom_properties:
- info = first_relation.custom_properties.get('info')
- if info and isinstance(info, dict):
- info_info = info.get('info')
- if info_info and isinstance(info_info, dict):
- video = info_info.get('video', {})
- audio = info_info.get('audio', {})
- bitrate = info_info.get('bitrate', 0)
+ container_extension = "mp4"
+ added_timestamp = str(int(episode.created_at.timestamp()))
+
+ if best_relation:
+ container_extension = best_relation.container_extension or "mp4"
+ added_timestamp = str(int(best_relation.created_at.timestamp()))
+ if best_relation.custom_properties:
+ info = best_relation.custom_properties.get('info')
+ if info and isinstance(info, dict):
+ info_info = info.get('info')
+ if info_info and isinstance(info_info, dict):
+ video = info_info.get('video', {})
+ audio = info_info.get('audio', {})
+ bitrate = info_info.get('bitrate', 0)

  if video is None:
  video = episode.custom_properties.get('video', {}) if episode.custom_properties else {}
  if audio is None:

@@ -2561,8 +2582,8 @@ def xc_get_series_info(request, user, series_id):
  "season": season_num,
  "episode_num": episode.episode_number or 0,
  "title": episode.name,
- "container_extension": relation.container_extension or "mp4",
- "added": str(int(relation.created_at.timestamp())),
+ "container_extension": container_extension,
+ "added": added_timestamp,
  "custom_sid": None,
  "direct_source": "",
  "info": {

@@ -2878,7 +2899,7 @@ def xc_series_stream(request, username, password, stream_id, extension):
  filters = {"episode_id": stream_id, "m3u_account__is_active": True}

  try:
- episode_relation = M3UEpisodeRelation.objects.select_related('episode').get(**filters)
+ episode_relation = M3UEpisodeRelation.objects.select_related('episode').filter(**filters).order_by('-m3u_account__priority', 'id').first()
  except M3UEpisodeRelation.DoesNotExist:
  return JsonResponse({"error": "Episode not found"}, status=404)

@@ -48,9 +48,11 @@ class ClientManager:
  # Import here to avoid potential import issues
  from apps.proxy.ts_proxy.channel_status import ChannelStatus
  import redis
+ from django.conf import settings

- # Get all channels from Redis
- redis_client = redis.Redis.from_url('redis://localhost:6379', decode_responses=True)
+ # Get all channels from Redis using settings
+ redis_url = getattr(settings, 'REDIS_URL', 'redis://localhost:6379/0')
+ redis_client = redis.Redis.from_url(redis_url, decode_responses=True)
  all_channels = []
  cursor = 0

@@ -471,7 +471,7 @@ def validate_stream_url(url, user_agent=None, timeout=(5, 5)):
  # If HEAD not supported, server will return 405 or other error
  if 200 <= head_response.status_code < 300:
  # HEAD request successful
- return True, head_response.url, head_response.status_code, "Valid (HEAD request)"
+ return True, url, head_response.status_code, "Valid (HEAD request)"

  # Try a GET request with stream=True to avoid downloading all content
  get_response = session.get(

@@ -484,7 +484,7 @@ def validate_stream_url(url, user_agent=None, timeout=(5, 5)):
  # IMPORTANT: Check status code first before checking content
  if not (200 <= get_response.status_code < 300):
  logger.warning(f"Stream validation failed with HTTP status {get_response.status_code}")
- return False, get_response.url, get_response.status_code, f"Invalid HTTP status: {get_response.status_code}"
+ return False, url, get_response.status_code, f"Invalid HTTP status: {get_response.status_code}"

  # Only check content if status code is valid
  try:

@@ -538,7 +538,7 @@ def validate_stream_url(url, user_agent=None, timeout=(5, 5)):
  get_response.close()

  # If we have content, consider it valid even with unrecognized content type
- return is_valid, get_response.url, get_response.status_code, message
+ return is_valid, url, get_response.status_code, message

  except requests.exceptions.Timeout:
  return False, url, 0, "Timeout connecting to stream"

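The three return-value changes above make the same swap: `requests` follows redirects, so `response.url` is the final, possibly short-lived tokenized URL, while `url` is the stable address the caller asked to validate. A small standalone illustration of the difference, with example.com standing in for a provider host:

    import requests

    url = "http://example.com/stream.m3u8"  # stand-in for a provider URL that redirects

    try:
        resp = requests.get(url, stream=True, timeout=(5, 5), allow_redirects=True)
        # resp.url is the *final* URL after redirects; for providers that redirect to a
        # temporary token-stamped CDN URL it can expire before a client ever connects.
        print("requested:", url)
        print("resolved :", resp.url)
        # Returning `url` (not resp.url) keeps the stable, re-resolvable address.
        result = (200 <= resp.status_code < 300, url, resp.status_code)
        resp.close()
    except requests.exceptions.Timeout:
        result = (False, url, 0)
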
@@ -97,7 +97,11 @@ class PersistentVODConnection:
  # First check if we have a pre-stored content length from HEAD request
  try:
  import redis
- r = redis.StrictRedis(host='localhost', port=6379, db=0, decode_responses=True)
+ from django.conf import settings
+ redis_host = getattr(settings, 'REDIS_HOST', 'localhost')
+ redis_port = int(getattr(settings, 'REDIS_PORT', 6379))
+ redis_db = int(getattr(settings, 'REDIS_DB', 0))
+ r = redis.StrictRedis(host=redis_host, port=redis_port, db=redis_db, decode_responses=True)
  content_length_key = f"vod_content_length:{self.session_id}"
  stored_length = r.get(content_length_key)
  if stored_length:

@@ -24,6 +24,11 @@ from apps.m3u.models import M3UAccountProfile
 logger = logging.getLogger("vod_proxy")


+def get_vod_client_stop_key(client_id):
+    """Get the Redis key for signaling a VOD client to stop"""
+    return f"vod_proxy:client:{client_id}:stop"
+
+
 def infer_content_type_from_url(url: str) -> Optional[str]:
     """
     Infer MIME type from file extension in URL
@@ -832,6 +837,7 @@ class MultiWorkerVODConnectionManager:
         # Create streaming generator
         def stream_generator():
             decremented = False
+            stop_signal_detected = False
             try:
                 logger.info(f"[{client_id}] Worker {self.worker_id} - Starting Redis-backed stream")

@@ -846,14 +852,25 @@ class MultiWorkerVODConnectionManager:
                 bytes_sent = 0
                 chunk_count = 0

+                # Get the stop signal key for this client
+                stop_key = get_vod_client_stop_key(client_id)
+
                 for chunk in upstream_response.iter_content(chunk_size=8192):
                     if chunk:
                         yield chunk
                         bytes_sent += len(chunk)
                         chunk_count += 1

-                        # Update activity every 100 chunks in consolidated connection state
+                        # Check for stop signal every 100 chunks
                         if chunk_count % 100 == 0:
+                            # Check if stop signal has been set
+                            if self.redis_client and self.redis_client.exists(stop_key):
+                                logger.info(f"[{client_id}] Worker {self.worker_id} - Stop signal detected, terminating stream")
+                                # Delete the stop key
+                                self.redis_client.delete(stop_key)
+                                stop_signal_detected = True
+                                break
+
                             # Update the connection state
                             logger.debug(f"Client: [{client_id}] Worker: {self.worker_id} sent {chunk_count} chunks for VOD: {content_name}")
                             if redis_connection._acquire_lock():

@@ -867,7 +884,10 @@ class MultiWorkerVODConnectionManager:
                 finally:
                     redis_connection._release_lock()

-            logger.info(f"[{client_id}] Worker {self.worker_id} - Redis-backed stream completed: {bytes_sent} bytes sent")
+            if stop_signal_detected:
+                logger.info(f"[{client_id}] Worker {self.worker_id} - Stream stopped by signal: {bytes_sent} bytes sent")
+            else:
+                logger.info(f"[{client_id}] Worker {self.worker_id} - Redis-backed stream completed: {bytes_sent} bytes sent")
             redis_connection.decrement_active_streams()
             decremented = True
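The change above adds a cooperative stop: the generator polls a per-client Redis key every 100 chunks and ends the stream when it appears. A hedged sketch of the same idea outside Dispatcharr's classes; `fetch_chunks` is a placeholder chunk source and the polling cadence is taken from the diff:

import logging

logger = logging.getLogger(__name__)

def streaming_body(redis_client, stop_key, fetch_chunks, check_every=100):
    """Yield chunks, ending early if the stop key appears in Redis."""
    for count, chunk in enumerate(fetch_chunks(), start=1):
        yield chunk
        # Polling every `check_every` chunks keeps the Redis round-trips cheap.
        if count % check_every == 0 and redis_client.exists(stop_key):
            redis_client.delete(stop_key)  # consume the one-shot signal
            logger.info("Stop signal detected for %s, terminating stream", stop_key)
            return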
@@ -21,4 +21,7 @@ urlpatterns = [

     # VOD Stats
     path('stats/', views.VODStatsView.as_view(), name='vod_stats'),
+
+    # Stop VOD client connection
+    path('stop_client/', views.stop_vod_client, name='stop_vod_client'),
 ]
@@ -15,7 +15,7 @@ from django.views import View
 from apps.vod.models import Movie, Series, Episode
 from apps.m3u.models import M3UAccount, M3UAccountProfile
 from apps.proxy.vod_proxy.connection_manager import VODConnectionManager
-from apps.proxy.vod_proxy.multi_worker_connection_manager import MultiWorkerVODConnectionManager, infer_content_type_from_url
+from apps.proxy.vod_proxy.multi_worker_connection_manager import MultiWorkerVODConnectionManager, infer_content_type_from_url, get_vod_client_stop_key
 from .utils import get_client_info, create_vod_response

 logger = logging.getLogger(__name__)
@@ -329,7 +329,11 @@ class VODStreamView(View):
                 # Store the total content length in Redis for the persistent connection to use
                 try:
                     import redis
-                    r = redis.StrictRedis(host='localhost', port=6379, db=0, decode_responses=True)
+                    from django.conf import settings
+                    redis_host = getattr(settings, 'REDIS_HOST', 'localhost')
+                    redis_port = int(getattr(settings, 'REDIS_PORT', 6379))
+                    redis_db = int(getattr(settings, 'REDIS_DB', 0))
+                    r = redis.StrictRedis(host=redis_host, port=redis_port, db=redis_db, decode_responses=True)
                     content_length_key = f"vod_content_length:{session_id}"
                     r.set(content_length_key, total_size, ex=1800)  # Store for 30 minutes
                     logger.info(f"[VOD-HEAD] Stored total content length {total_size} for session {session_id}")
@@ -1011,3 +1015,59 @@ class VODStatsView(View):
         except Exception as e:
             logger.error(f"Error getting VOD stats: {e}")
             return JsonResponse({'error': str(e)}, status=500)
+
+
+from rest_framework.decorators import api_view, permission_classes
+from apps.accounts.permissions import IsAdmin
+
+
+@csrf_exempt
+@api_view(["POST"])
+@permission_classes([IsAdmin])
+def stop_vod_client(request):
+    """Stop a specific VOD client connection using stop signal mechanism"""
+    try:
+        # Parse request body
+        import json
+        try:
+            data = json.loads(request.body)
+        except json.JSONDecodeError:
+            return JsonResponse({'error': 'Invalid JSON'}, status=400)
+
+        client_id = data.get('client_id')
+        if not client_id:
+            return JsonResponse({'error': 'No client_id provided'}, status=400)
+
+        logger.info(f"Request to stop VOD client: {client_id}")
+
+        # Get Redis client
+        connection_manager = MultiWorkerVODConnectionManager.get_instance()
+        redis_client = connection_manager.redis_client
+
+        if not redis_client:
+            return JsonResponse({'error': 'Redis not available'}, status=500)
+
+        # Check if connection exists
+        connection_key = f"vod_persistent_connection:{client_id}"
+        connection_data = redis_client.hgetall(connection_key)
+        if not connection_data:
+            logger.warning(f"VOD connection not found: {client_id}")
+            return JsonResponse({'error': 'Connection not found'}, status=404)
+
+        # Set a stop signal key that the worker will check
+        stop_key = get_vod_client_stop_key(client_id)
+        redis_client.setex(stop_key, 60, "true")  # 60 second TTL
+
+        logger.info(f"Set stop signal for VOD client: {client_id}")
+
+        return JsonResponse({
+            'message': 'VOD client stop signal sent',
+            'client_id': client_id,
+            'stop_key': stop_key
+        })
+
+    except Exception as e:
+        logger.error(f"Error stopping VOD client: {e}", exc_info=True)
+        return JsonResponse({'error': str(e)}, status=500)
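A quick way to exercise the new endpoint. This is a hypothetical call against a local instance: the `/proxy/vod/stop_client/` path is taken from the frontend API change later in this diff, the port is the default nginx port from the entrypoint script, and an admin JWT is assumed because the view requires `IsAdmin`:

import requests

resp = requests.post(
    "http://localhost:9191/proxy/vod/stop_client/",
    json={"client_id": "abc123"},  # client_id of the VOD connection to stop
    headers={"Authorization": "Bearer <admin-jwt>"},
    timeout=5,
)
# Expect {'message': 'VOD client stop signal sent', ...} on success
print(resp.status_code, resp.json())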
@@ -1232,7 +1232,13 @@ def refresh_series_episodes(account, series, external_series_id, episodes_data=None):


 def batch_process_episodes(account, series, episodes_data, scan_start_time=None):
-    """Process episodes in batches for better performance"""
+    """Process episodes in batches for better performance.
+
+    Note: Multiple streams can represent the same episode (e.g., different languages
+    or qualities). Each stream has a unique stream_id, but they share the same
+    season/episode number. We create one Episode record per (series, season, episode)
+    and multiple M3UEpisodeRelation records pointing to it.
+    """
     if not episodes_data:
         return
@@ -1249,12 +1255,13 @@ def batch_process_episodes(account, series, episodes_data, scan_start_time=None):
     logger.info(f"Batch processing {len(all_episodes_data)} episodes for series {series.name}")

     # Extract episode identifiers
-    episode_keys = []
+    # Note: episode_keys may have duplicates when multiple streams represent same episode
+    episode_keys = set()  # Use set to track unique episode keys
     episode_ids = []
     for episode_data in all_episodes_data:
         season_num = episode_data['_season_number']
         episode_num = episode_data.get('episode_num', 0)
-        episode_keys.append((series.id, season_num, episode_num))
+        episode_keys.add((series.id, season_num, episode_num))
         episode_ids.append(str(episode_data.get('id')))

     # Pre-fetch existing episodes
@@ -1277,6 +1284,10 @@ def batch_process_episodes(account, series, episodes_data, scan_start_time=None):
     relations_to_create = []
     relations_to_update = []

+    # Track episodes we're creating in this batch to avoid duplicates
+    # Key: (series_id, season_number, episode_number) -> Episode object
+    episodes_pending_creation = {}
+
     for episode_data in all_episodes_data:
         try:
             episode_id = str(episode_data.get('id'))
@@ -1306,10 +1317,15 @@ def batch_process_episodes(account, series, episodes_data, scan_start_time=None):
             if backdrop:
                 custom_props['backdrop_path'] = [backdrop]

-            # Find existing episode
+            # Find existing episode - check DB first, then pending creations
             episode_key = (series.id, season_number, episode_number)
             episode = existing_episodes.get(episode_key)

+            # Check if we already have this episode pending creation (multiple streams for same episode)
+            if not episode and episode_key in episodes_pending_creation:
+                episode = episodes_pending_creation[episode_key]
+                logger.debug(f"Reusing pending episode for S{season_number:02d}E{episode_number:02d} (stream_id: {episode_id})")
+
             if episode:
                 # Update existing episode
                 updated = False
@@ -1338,7 +1354,9 @@ def batch_process_episodes(account, series, episodes_data, scan_start_time=None):
                     episode.custom_properties = custom_props if custom_props else None
                     updated = True

-                if updated:
+                # Only add to update list if episode has a PK (exists in DB) and isn't already in list
+                # Episodes pending creation don't have PKs yet and will be created via bulk_create
+                if updated and episode.pk and episode not in episodes_to_update:
                     episodes_to_update.append(episode)
             else:
                 # Create new episode
@@ -1356,6 +1374,8 @@ def batch_process_episodes(account, series, episodes_data, scan_start_time=None):
                     custom_properties=custom_props if custom_props else None
                 )
                 episodes_to_create.append(episode)
+                # Track this episode so subsequent streams with same season/episode can reuse it
+                episodes_pending_creation[episode_key] = episode

             # Handle episode relation
             if episode_id in existing_relations:
@@ -1389,9 +1409,28 @@ def batch_process_episodes(account, series, episodes_data, scan_start_time=None):

     # Execute batch operations
     with transaction.atomic():
-        # Create new episodes
+        # Create new episodes - use ignore_conflicts in case of race conditions
         if episodes_to_create:
-            Episode.objects.bulk_create(episodes_to_create)
+            Episode.objects.bulk_create(episodes_to_create, ignore_conflicts=True)
+
+            # Re-fetch the created episodes to get their PKs
+            # We need to do this because bulk_create with ignore_conflicts doesn't set PKs
+            created_episode_keys = [
+                (ep.series_id, ep.season_number, ep.episode_number)
+                for ep in episodes_to_create
+            ]
+            db_episodes = Episode.objects.filter(series=series)
+            episode_pk_map = {
+                (ep.series_id, ep.season_number, ep.episode_number): ep
+                for ep in db_episodes
+            }
+
+            # Update relations to point to the actual DB episodes with PKs
+            for relation in relations_to_create:
+                ep = relation.episode
+                key = (ep.series_id, ep.season_number, ep.episode_number)
+                if key in episode_pk_map:
+                    relation.episode = episode_pk_map[key]

         # Update existing episodes
         if episodes_to_update:
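The re-fetch above exists because, on most database backends, `bulk_create(..., ignore_conflicts=True)` does not populate primary keys on the in-memory objects, so any relation built against them has to be re-pointed at rows read back from the database. A generic sketch of that pattern, not tied to Dispatcharr's models (`model`, `natural_key`, and `scope_qs` are illustrative parameters):

from django.db import transaction

def bulk_create_with_pks(model, objs, natural_key, scope_qs):
    """Insert objs, skipping duplicates, and return DB-backed rows with PKs.

    bulk_create(ignore_conflicts=True) leaves obj.pk unset on most backends,
    so rows are re-read from `scope_qs` and matched by a natural key.
    """
    with transaction.atomic():
        model.objects.bulk_create(objs, ignore_conflicts=True)
        by_key = {natural_key(row): row for row in scope_qs}
        # Objects whose insert was skipped as a conflict resolve to the existing row.
        return [by_key.get(natural_key(o)) for o in objs]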
@@ -1400,9 +1439,9 @@ def batch_process_episodes(account, series, episodes_data, scan_start_time=None):
                 'tmdb_id', 'imdb_id', 'custom_properties'
             ])

-        # Create new episode relations
+        # Create new episode relations - use ignore_conflicts for stream_id duplicates
         if relations_to_create:
-            M3UEpisodeRelation.objects.bulk_create(relations_to_create)
+            M3UEpisodeRelation.objects.bulk_create(relations_to_create, ignore_conflicts=True)

         # Update existing episode relations
         if relations_to_update:
@@ -142,8 +142,12 @@ class CoreSettingsViewSet(viewsets.ModelViewSet):
                 },
                 status=status.HTTP_200_OK,
             )

-        return Response(in_network, status=status.HTTP_200_OK)
+        response_data = {
+            **in_network,
+            "client_ip": str(client_ip)
+        }
+        return Response(response_data, status=status.HTTP_200_OK)

     return Response({}, status=status.HTTP_200_OK)
@@ -513,7 +513,8 @@ def rehash_streams(keys):

         for obj in batch:
             # Generate new hash
-            new_hash = Stream.generate_hash_key(obj.name, obj.url, obj.tvg_id, keys, m3u_id=obj.m3u_account_id)
+            group_name = obj.channel_group.name if obj.channel_group else None
+            new_hash = Stream.generate_hash_key(obj.name, obj.url, obj.tvg_id, keys, m3u_id=obj.m3u_account_id, group=group_name)

             # Check if this hash already exists in our tracking dict or in database
             if new_hash in hash_keys:
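The rehash change passes the channel group into the hash key, which pairs with the new "Group" option added to the M3U hash-key setting later in this diff. The real `Stream.generate_hash_key` is not shown here; the sketch below only illustrates the general idea that the configured key fields are concatenated and hashed, so enabling "group" changes which streams collide:

import hashlib

def generate_hash_key(name, url, tvg_id, keys, m3u_id=None, group=None):
    # Illustrative only: concatenate the selected fields and hash them.
    parts = {"name": name, "url": url, "tvg_id": tvg_id, "m3u_id": m3u_id, "group": group}
    material = "|".join(str(parts[k]) for k in keys if k in parts)
    return hashlib.sha256(material.encode("utf-8")).hexdigest()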
@@ -37,7 +37,9 @@ def stream_view(request, channel_uuid):
     """
     try:
         redis_host = getattr(settings, "REDIS_HOST", "localhost")
-        redis_client = redis.Redis(host=settings.REDIS_HOST, port=6379, db=int(getattr(settings, "REDIS_DB", "0")))
+        redis_port = int(getattr(settings, "REDIS_PORT", 6379))
+        redis_db = int(getattr(settings, "REDIS_DB", "0"))
+        redis_client = redis.Redis(host=redis_host, port=redis_port, db=redis_db)

         # Retrieve the channel by the provided stream_id.
         channel = Channel.objects.get(uuid=channel_uuid)
@@ -73,8 +73,12 @@ class PersistentLock:

 # Example usage (for testing purposes only):
 if __name__ == "__main__":
-    # Connect to Redis on localhost; adjust connection parameters as needed.
-    client = redis.Redis(host="localhost", port=6379, db=0)
+    import os
+    # Connect to Redis using environment variables; adjust connection parameters as needed.
+    redis_host = os.environ.get("REDIS_HOST", "localhost")
+    redis_port = int(os.environ.get("REDIS_PORT", 6379))
+    redis_db = int(os.environ.get("REDIS_DB", 0))
+    client = redis.Redis(host=redis_host, port=redis_port, db=redis_db)
     lock = PersistentLock(client, "lock:example_account", lock_timeout=120)

     if lock.acquire():
@@ -6,6 +6,7 @@ BASE_DIR = Path(__file__).resolve().parent.parent

 SECRET_KEY = os.environ.get("DJANGO_SECRET_KEY")
 REDIS_HOST = os.environ.get("REDIS_HOST", "localhost")
+REDIS_PORT = int(os.environ.get("REDIS_PORT", 6379))
 REDIS_DB = os.environ.get("REDIS_DB", "0")

 # Set DEBUG to True for development, False for production
@@ -118,7 +119,7 @@ CHANNEL_LAYERS = {
     "default": {
         "BACKEND": "channels_redis.core.RedisChannelLayer",
         "CONFIG": {
-            "hosts": [(REDIS_HOST, 6379, REDIS_DB)],  # Ensure Redis is running
+            "hosts": [(REDIS_HOST, REDIS_PORT, REDIS_DB)],  # Ensure Redis is running
         },
     },
 }
@@ -184,8 +185,10 @@ STATICFILES_DIRS = [
 DEFAULT_AUTO_FIELD = "django.db.models.BigAutoField"
 AUTH_USER_MODEL = "accounts.User"

-CELERY_BROKER_URL = os.environ.get("CELERY_BROKER_URL", "redis://localhost:6379/0")
-CELERY_RESULT_BACKEND = CELERY_BROKER_URL
+# Build default Redis URL from components for Celery
+_default_redis_url = f"redis://{REDIS_HOST}:{REDIS_PORT}/{REDIS_DB}"
+CELERY_BROKER_URL = os.environ.get("CELERY_BROKER_URL", _default_redis_url)
+CELERY_RESULT_BACKEND = os.environ.get("CELERY_RESULT_BACKEND", CELERY_BROKER_URL)

 # Configure Redis key prefix
 CELERY_RESULT_BACKEND_TRANSPORT_OPTIONS = {
@@ -249,7 +252,7 @@ SIMPLE_JWT = {
 }

 # Redis connection settings
-REDIS_URL = "redis://localhost:6379/0"
+REDIS_URL = os.environ.get("REDIS_URL", f"redis://{REDIS_HOST}:{REDIS_PORT}/{REDIS_DB}")
 REDIS_SOCKET_TIMEOUT = 60  # Socket timeout in seconds
 REDIS_SOCKET_CONNECT_TIMEOUT = 5  # Connection timeout in seconds
 REDIS_HEALTH_CHECK_INTERVAL = 15  # Health check every 15 seconds
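The settings changes above give a clear precedence: an explicit `REDIS_URL` (or `CELERY_BROKER_URL`/`CELERY_RESULT_BACKEND`) wins, otherwise the URL is assembled from `REDIS_HOST`, `REDIS_PORT`, and `REDIS_DB`. A small illustration of that behavior; the environment values here (a `redis` service name and port 6380) are examples only, not Dispatcharr defaults:

import os

# Example environment, e.g. from docker-compose
os.environ.setdefault("REDIS_HOST", "redis")
os.environ.setdefault("REDIS_PORT", "6380")

REDIS_HOST = os.environ.get("REDIS_HOST", "localhost")
REDIS_PORT = int(os.environ.get("REDIS_PORT", 6379))
REDIS_DB = os.environ.get("REDIS_DB", "0")

# Explicit REDIS_URL overrides everything; otherwise it is built from the parts.
REDIS_URL = os.environ.get("REDIS_URL", f"redis://{REDIS_HOST}:{REDIS_PORT}/{REDIS_DB}")
print(REDIS_URL)  # -> redis://redis:6380/0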
@@ -1,11 +1,65 @@
 #!/bin/bash
-docker build --build-arg BRANCH=dev -t dispatcharr/dispatcharr:dev -f Dockerfile ..
-
-
-# Get version information
-VERSION=$(python -c "import sys; sys.path.append('..'); import version; print(version.__version__)")
-
-# Build with version tag
-docker build --build-arg BRANCH=dev \
-  -t dispatcharr/dispatcharr:dev \
-  -t dispatcharr/dispatcharr:${VERSION} \
-  -f Dockerfile ..
+set -e
+
+# Default values
+VERSION=$(python3 -c "import sys; sys.path.append('..'); import version; print(version.__version__)")
+REGISTRY="dispatcharr"   # Registry or private repo to push to
+IMAGE="dispatcharr"      # Image that we're building
+BRANCH="dev"
+ARCH=""                  # Architectures to build for, e.g. linux/amd64,linux/arm64
+PUSH=false
+
+usage() {
+    cat <<- EOF
+To test locally:
+    ./build-dev.sh
+
+To build and push to registry:
+    ./build-dev.sh -p
+
+To build and push to a private registry:
+    ./build-dev.sh -p -r myregistry:5000
+
+To build for -both- x86_64 and arm_64:
+    ./build-dev.sh -p -a linux/amd64,linux/arm64
+
+Do it all:
+    ./build-dev.sh -p -r myregistry:5000 -a linux/amd64,linux/arm64
+EOF
+    exit 0
+}
+
+# Parse options
+while getopts "pr:a:b:i:h" opt; do
+    case $opt in
+        r) REGISTRY="$OPTARG" ;;
+        a) ARCH="--platform $OPTARG" ;;
+        b) BRANCH="$OPTARG" ;;
+        i) IMAGE="$OPTARG" ;;
+        p) PUSH=true ;;
+        h) usage ;;
+        \?) echo "Invalid option: -$OPTARG" >&2; exit 1 ;;
+    esac
+done
+
+BUILD_ARGS="BRANCH=$BRANCH"
+
+echo docker build --build-arg $BUILD_ARGS $ARCH -t $IMAGE
+docker build -f Dockerfile --build-arg $BUILD_ARGS $ARCH -t $IMAGE ..
+docker tag $IMAGE $IMAGE:$BRANCH
+docker tag $IMAGE $IMAGE:$VERSION
+
+if [ -z "$PUSH" ]; then
+    echo "Please run 'docker push -t $IMAGE:dev -t $IMAGE:${VERSION}' when ready"
+else
+    for TAG in latest "$VERSION" "$BRANCH"; do
+        docker tag "$IMAGE" "$REGISTRY/$IMAGE:$TAG"
+        docker push -q "$REGISTRY/$IMAGE:$TAG"
+    done
+    echo "Images pushed successfully."
+fi
@@ -30,6 +30,10 @@ if [ "$(id -u)" = "0" ] && [ -d "/app" ]; then
     fi
 fi
 # Configure nginx port
+if ! [[ "$DISPATCHARR_PORT" =~ ^[0-9]+$ ]]; then
+    echo "⚠️ Warning: DISPATCHARR_PORT is not a valid integer, using default port 9191"
+    DISPATCHARR_PORT=9191
+fi
 sed -i "s/NGINX_PORT/${DISPATCHARR_PORT}/g" /etc/nginx/sites-enabled/default

 # Configure nginx based on IPv6 availability
frontend/package-lock.json (generated, 1317 lines changed): diff suppressed because it is too large.
@@ -54,18 +54,21 @@
     "@types/react": "^19.1.0",
     "@types/react-dom": "^19.1.0",
     "@vitejs/plugin-react-swc": "^4.1.0",
-    "eslint": "^9.21.0",
+    "eslint": "^9.27.0",
     "eslint-plugin-react-hooks": "^5.1.0",
     "eslint-plugin-react-refresh": "^0.4.19",
     "globals": "^15.15.0",
     "jsdom": "^27.0.0",
     "prettier": "^3.5.3",
-    "vite": "^6.2.0",
+    "vite": "^7.1.7",
     "vitest": "^3.2.4"
   },
   "resolutions": {
     "vite": "7.1.7",
     "react": "19.1.0",
     "react-dom": "19.1.0"
+  },
+  "overrides": {
+    "js-yaml": "^4.1.1"
   }
 }
@@ -1691,6 +1691,19 @@ export default class API {
     }
   }

+  static async stopVODClient(clientId) {
+    try {
+      const response = await request(`${host}/proxy/vod/stop_client/`, {
+        method: 'POST',
+        body: { client_id: clientId },
+      });
+
+      return response;
+    } catch (e) {
+      errorNotification('Failed to stop VOD client', e);
+    }
+  }
+
   static async stopChannel(id) {
     try {
       const response = await request(`${host}/proxy/ts/stop/${id}`, {
@@ -2308,7 +2321,8 @@ export default class API {

   static async deleteSeriesRule(tvgId) {
     try {
-      await request(`${host}/api/channels/series-rules/${tvgId}/`, { method: 'DELETE' });
+      const encodedTvgId = encodeURIComponent(tvgId);
+      await request(`${host}/api/channels/series-rules/${encodedTvgId}/`, { method: 'DELETE' });
       notifications.show({ title: 'Series rule removed' });
     } catch (e) {
       errorNotification('Failed to remove series rule', e);
@ -1,5 +1,5 @@
|
||||||
// frontend/src/components/FloatingVideo.js
|
// frontend/src/components/FloatingVideo.js
|
||||||
import React, { useEffect, useRef, useState } from 'react';
|
import React, { useCallback, useEffect, useRef, useState } from 'react';
|
||||||
import Draggable from 'react-draggable';
|
import Draggable from 'react-draggable';
|
||||||
import useVideoStore from '../store/useVideoStore';
|
import useVideoStore from '../store/useVideoStore';
|
||||||
import mpegts from 'mpegts.js';
|
import mpegts from 'mpegts.js';
|
||||||
|
|
@ -17,7 +17,94 @@ export default function FloatingVideo() {
|
||||||
const [isLoading, setIsLoading] = useState(false);
|
const [isLoading, setIsLoading] = useState(false);
|
||||||
const [loadError, setLoadError] = useState(null);
|
const [loadError, setLoadError] = useState(null);
|
||||||
const [showOverlay, setShowOverlay] = useState(true);
|
const [showOverlay, setShowOverlay] = useState(true);
|
||||||
|
const [videoSize, setVideoSize] = useState({ width: 320, height: 180 });
|
||||||
|
const [isResizing, setIsResizing] = useState(false);
|
||||||
|
const resizeStateRef = useRef(null);
|
||||||
const overlayTimeoutRef = useRef(null);
|
const overlayTimeoutRef = useRef(null);
|
||||||
|
const aspectRatioRef = useRef(320 / 180);
|
||||||
|
const [dragPosition, setDragPosition] = useState(null);
|
||||||
|
const dragPositionRef = useRef(null);
|
||||||
|
const dragOffsetRef = useRef({ x: 0, y: 0 });
|
||||||
|
const initialPositionRef = useRef(null);
|
||||||
|
|
||||||
|
const MIN_WIDTH = 220;
|
||||||
|
const MIN_HEIGHT = 124;
|
||||||
|
const VISIBLE_MARGIN = 48; // keep part of the window visible when dragging
|
||||||
|
const HEADER_HEIGHT = 38; // height of the close button header area
|
||||||
|
const ERROR_HEIGHT = 45; // approximate height of error message area when displayed
|
||||||
|
const HANDLE_SIZE = 18;
|
||||||
|
const HANDLE_OFFSET = 0;
|
||||||
|
const resizeHandleBaseStyle = {
|
||||||
|
position: 'absolute',
|
||||||
|
width: HANDLE_SIZE,
|
||||||
|
height: HANDLE_SIZE,
|
||||||
|
backgroundColor: 'transparent',
|
||||||
|
borderRadius: 6,
|
||||||
|
zIndex: 8,
|
||||||
|
touchAction: 'none',
|
||||||
|
};
|
||||||
|
const resizeHandles = [
|
||||||
|
{
|
||||||
|
id: 'bottom-right',
|
||||||
|
cursor: 'nwse-resize',
|
||||||
|
xDir: 1,
|
||||||
|
yDir: 1,
|
||||||
|
isLeft: false,
|
||||||
|
isTop: false,
|
||||||
|
style: {
|
||||||
|
bottom: HANDLE_OFFSET,
|
||||||
|
right: HANDLE_OFFSET,
|
||||||
|
borderBottom: '2px solid rgba(255, 255, 255, 0.9)',
|
||||||
|
borderRight: '2px solid rgba(255, 255, 255, 0.9)',
|
||||||
|
borderRadius: '0 0 6px 0',
|
||||||
|
},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
id: 'bottom-left',
|
||||||
|
cursor: 'nesw-resize',
|
||||||
|
xDir: -1,
|
||||||
|
yDir: 1,
|
||||||
|
isLeft: true,
|
||||||
|
isTop: false,
|
||||||
|
style: {
|
||||||
|
bottom: HANDLE_OFFSET,
|
||||||
|
left: HANDLE_OFFSET,
|
||||||
|
borderBottom: '2px solid rgba(255, 255, 255, 0.9)',
|
||||||
|
borderLeft: '2px solid rgba(255, 255, 255, 0.9)',
|
||||||
|
borderRadius: '0 0 0 6px',
|
||||||
|
},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
id: 'top-right',
|
||||||
|
cursor: 'nesw-resize',
|
||||||
|
xDir: 1,
|
||||||
|
yDir: -1,
|
||||||
|
isLeft: false,
|
||||||
|
isTop: true,
|
||||||
|
style: {
|
||||||
|
top: HANDLE_OFFSET,
|
||||||
|
right: HANDLE_OFFSET,
|
||||||
|
borderTop: '2px solid rgba(255, 255, 255, 0.9)',
|
||||||
|
borderRight: '2px solid rgba(255, 255, 255, 0.9)',
|
||||||
|
borderRadius: '0 6px 0 0',
|
||||||
|
},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
id: 'top-left',
|
||||||
|
cursor: 'nwse-resize',
|
||||||
|
xDir: -1,
|
||||||
|
yDir: -1,
|
||||||
|
isLeft: true,
|
||||||
|
isTop: true,
|
||||||
|
style: {
|
||||||
|
top: HANDLE_OFFSET,
|
||||||
|
left: HANDLE_OFFSET,
|
||||||
|
borderTop: '2px solid rgba(255, 255, 255, 0.9)',
|
||||||
|
borderLeft: '2px solid rgba(255, 255, 255, 0.9)',
|
||||||
|
borderRadius: '6px 0 0 0',
|
||||||
|
},
|
||||||
|
},
|
||||||
|
];
|
||||||
|
|
||||||
// Safely destroy the mpegts player to prevent errors
|
// Safely destroy the mpegts player to prevent errors
|
||||||
const safeDestroyPlayer = () => {
|
const safeDestroyPlayer = () => {
|
||||||
|
|
@ -315,24 +402,319 @@ export default function FloatingVideo() {
|
||||||
}, 50);
|
}, 50);
|
||||||
};
|
};
|
||||||
|
|
||||||
|
const clampToVisible = useCallback(
|
||||||
|
(x, y) => {
|
||||||
|
if (typeof window === 'undefined') return { x, y };
|
||||||
|
|
||||||
|
const totalHeight = videoSize.height + HEADER_HEIGHT + ERROR_HEIGHT;
|
||||||
|
const minX = -(videoSize.width - VISIBLE_MARGIN);
|
||||||
|
const minY = -(totalHeight - VISIBLE_MARGIN);
|
||||||
|
const maxX = window.innerWidth - videoSize.width;
|
||||||
|
const maxY = window.innerHeight - totalHeight;
|
||||||
|
|
||||||
|
return {
|
||||||
|
x: Math.min(Math.max(x, minX), maxX),
|
||||||
|
y: Math.min(Math.max(y, minY), maxY),
|
||||||
|
};
|
||||||
|
},
|
||||||
|
[
|
||||||
|
VISIBLE_MARGIN,
|
||||||
|
HEADER_HEIGHT,
|
||||||
|
ERROR_HEIGHT,
|
||||||
|
videoSize.height,
|
||||||
|
videoSize.width,
|
||||||
|
]
|
||||||
|
);
|
||||||
|
|
||||||
|
const clampToVisibleWithSize = useCallback(
|
||||||
|
(x, y, width, height) => {
|
||||||
|
if (typeof window === 'undefined') return { x, y };
|
||||||
|
|
||||||
|
const totalHeight = height + HEADER_HEIGHT + ERROR_HEIGHT;
|
||||||
|
const minX = -(width - VISIBLE_MARGIN);
|
||||||
|
const minY = -(totalHeight - VISIBLE_MARGIN);
|
||||||
|
const maxX = window.innerWidth - width;
|
||||||
|
const maxY = window.innerHeight - totalHeight;
|
||||||
|
|
||||||
|
return {
|
||||||
|
x: Math.min(Math.max(x, minX), maxX),
|
||||||
|
y: Math.min(Math.max(y, minY), maxY),
|
||||||
|
};
|
||||||
|
},
|
||||||
|
[VISIBLE_MARGIN, HEADER_HEIGHT, ERROR_HEIGHT]
|
||||||
|
);
|
||||||
|
|
||||||
|
const handleResizeMove = useCallback(
|
||||||
|
(event) => {
|
||||||
|
if (!resizeStateRef.current) return;
|
||||||
|
|
||||||
|
const clientX =
|
||||||
|
event.touches && event.touches.length
|
||||||
|
? event.touches[0].clientX
|
||||||
|
: event.clientX;
|
||||||
|
const clientY =
|
||||||
|
event.touches && event.touches.length
|
||||||
|
? event.touches[0].clientY
|
||||||
|
: event.clientY;
|
||||||
|
|
||||||
|
const {
|
||||||
|
startX,
|
||||||
|
startY,
|
||||||
|
startWidth,
|
||||||
|
startHeight,
|
||||||
|
startPos,
|
||||||
|
handle,
|
||||||
|
aspectRatio,
|
||||||
|
} = resizeStateRef.current;
|
||||||
|
const deltaX = clientX - startX;
|
||||||
|
const deltaY = clientY - startY;
|
||||||
|
const widthDelta = deltaX * handle.xDir;
|
||||||
|
const heightDelta = deltaY * handle.yDir;
|
||||||
|
const ratio = aspectRatio || aspectRatioRef.current;
|
||||||
|
|
||||||
|
// Derive width/height while keeping the original aspect ratio
|
||||||
|
let nextWidth = startWidth + widthDelta;
|
||||||
|
let nextHeight = nextWidth / ratio;
|
||||||
|
|
||||||
|
// Allow vertical-driven resize if the user drags mostly vertically
|
||||||
|
if (Math.abs(deltaY) > Math.abs(deltaX)) {
|
||||||
|
nextHeight = startHeight + heightDelta;
|
||||||
|
nextWidth = nextHeight * ratio;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Respect minimums while keeping the ratio
|
||||||
|
if (nextWidth < MIN_WIDTH) {
|
||||||
|
nextWidth = MIN_WIDTH;
|
||||||
|
nextHeight = nextWidth / ratio;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (nextHeight < MIN_HEIGHT) {
|
||||||
|
nextHeight = MIN_HEIGHT;
|
||||||
|
nextWidth = nextHeight * ratio;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Keep within viewport with a margin based on current position
|
||||||
|
const posX = startPos?.x ?? 0;
|
||||||
|
const posY = startPos?.y ?? 0;
|
||||||
|
const margin = VISIBLE_MARGIN;
|
||||||
|
let maxWidth = null;
|
||||||
|
let maxHeight = null;
|
||||||
|
|
||||||
|
if (!handle.isLeft) {
|
||||||
|
maxWidth = Math.max(MIN_WIDTH, window.innerWidth - posX - margin);
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!handle.isTop) {
|
||||||
|
maxHeight = Math.max(MIN_HEIGHT, window.innerHeight - posY - margin);
|
||||||
|
}
|
||||||
|
|
||||||
|
if (maxWidth != null && nextWidth > maxWidth) {
|
||||||
|
nextWidth = maxWidth;
|
||||||
|
nextHeight = nextWidth / ratio;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (maxHeight != null && nextHeight > maxHeight) {
|
||||||
|
nextHeight = maxHeight;
|
||||||
|
nextWidth = nextHeight * ratio;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Final pass to honor both bounds while keeping the ratio
|
||||||
|
if (maxWidth != null && nextWidth > maxWidth) {
|
||||||
|
nextWidth = maxWidth;
|
||||||
|
nextHeight = nextWidth / ratio;
|
||||||
|
}
|
||||||
|
|
||||||
|
setVideoSize({
|
||||||
|
width: Math.round(nextWidth),
|
||||||
|
height: Math.round(nextHeight),
|
||||||
|
});
|
||||||
|
|
||||||
|
if (handle.isLeft || handle.isTop) {
|
||||||
|
let nextX = posX;
|
||||||
|
let nextY = posY;
|
||||||
|
|
||||||
|
if (handle.isLeft) {
|
||||||
|
nextX = posX + (startWidth - nextWidth);
|
||||||
|
}
|
||||||
|
|
||||||
|
if (handle.isTop) {
|
||||||
|
nextY = posY + (startHeight - nextHeight);
|
||||||
|
}
|
||||||
|
|
||||||
|
const clamped = clampToVisibleWithSize(
|
||||||
|
nextX,
|
||||||
|
nextY,
|
||||||
|
nextWidth,
|
||||||
|
nextHeight
|
||||||
|
);
|
||||||
|
|
||||||
|
if (handle.isLeft) {
|
||||||
|
nextX = clamped.x;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (handle.isTop) {
|
||||||
|
nextY = clamped.y;
|
||||||
|
}
|
||||||
|
|
||||||
|
const nextPos = { x: nextX, y: nextY };
|
||||||
|
setDragPosition(nextPos);
|
||||||
|
dragPositionRef.current = nextPos;
|
||||||
|
}
|
||||||
|
},
|
||||||
|
[MIN_HEIGHT, MIN_WIDTH, VISIBLE_MARGIN, clampToVisibleWithSize]
|
||||||
|
);
|
||||||
|
|
||||||
|
const endResize = useCallback(() => {
|
||||||
|
setIsResizing(false);
|
||||||
|
resizeStateRef.current = null;
|
||||||
|
window.removeEventListener('mousemove', handleResizeMove);
|
||||||
|
window.removeEventListener('mouseup', endResize);
|
||||||
|
window.removeEventListener('touchmove', handleResizeMove);
|
||||||
|
window.removeEventListener('touchend', endResize);
|
||||||
|
}, [handleResizeMove]);
|
||||||
|
|
||||||
|
const startResize = (event, handle) => {
|
||||||
|
event.stopPropagation();
|
||||||
|
event.preventDefault();
|
||||||
|
|
||||||
|
const clientX =
|
||||||
|
event.touches && event.touches.length
|
||||||
|
? event.touches[0].clientX
|
||||||
|
: event.clientX;
|
||||||
|
const clientY =
|
||||||
|
event.touches && event.touches.length
|
||||||
|
? event.touches[0].clientY
|
||||||
|
: event.clientY;
|
||||||
|
|
||||||
|
const aspectRatio =
|
||||||
|
videoSize.height > 0
|
||||||
|
? videoSize.width / videoSize.height
|
||||||
|
: aspectRatioRef.current;
|
||||||
|
aspectRatioRef.current = aspectRatio;
|
||||||
|
const startPos = dragPositionRef.current ||
|
||||||
|
initialPositionRef.current || { x: 0, y: 0 };
|
||||||
|
|
||||||
|
resizeStateRef.current = {
|
||||||
|
startX: clientX,
|
||||||
|
startY: clientY,
|
||||||
|
startWidth: videoSize.width,
|
||||||
|
startHeight: videoSize.height,
|
||||||
|
aspectRatio,
|
||||||
|
startPos,
|
||||||
|
handle,
|
||||||
|
};
|
||||||
|
|
||||||
|
setIsResizing(true);
|
||||||
|
|
||||||
|
window.addEventListener('mousemove', handleResizeMove);
|
||||||
|
window.addEventListener('mouseup', endResize);
|
||||||
|
window.addEventListener('touchmove', handleResizeMove);
|
||||||
|
window.addEventListener('touchend', endResize);
|
||||||
|
};
|
||||||
|
|
||||||
|
useEffect(() => {
|
||||||
|
return () => {
|
||||||
|
endResize();
|
||||||
|
};
|
||||||
|
}, [endResize]);
|
||||||
|
|
||||||
|
useEffect(() => {
|
||||||
|
dragPositionRef.current = dragPosition;
|
||||||
|
}, [dragPosition]);
|
||||||
|
|
||||||
|
// Initialize the floating window near bottom-right once
|
||||||
|
useEffect(() => {
|
||||||
|
if (initialPositionRef.current || typeof window === 'undefined') return;
|
||||||
|
|
||||||
|
const totalHeight = videoSize.height + HEADER_HEIGHT + ERROR_HEIGHT;
|
||||||
|
const initialX = Math.max(10, window.innerWidth - videoSize.width - 20);
|
||||||
|
const initialY = Math.max(10, window.innerHeight - totalHeight - 20);
|
||||||
|
const pos = clampToVisible(initialX, initialY);
|
||||||
|
|
||||||
|
initialPositionRef.current = pos;
|
||||||
|
setDragPosition(pos);
|
||||||
|
dragPositionRef.current = pos;
|
||||||
|
}, [
|
||||||
|
clampToVisible,
|
||||||
|
videoSize.height,
|
||||||
|
videoSize.width,
|
||||||
|
HEADER_HEIGHT,
|
||||||
|
ERROR_HEIGHT,
|
||||||
|
]);
|
||||||
|
|
||||||
|
const handleDragStart = useCallback(
|
||||||
|
(event, data) => {
|
||||||
|
const clientX = event.touches?.[0]?.clientX ?? event.clientX;
|
||||||
|
const clientY = event.touches?.[0]?.clientY ?? event.clientY;
|
||||||
|
const rect = videoContainerRef.current?.getBoundingClientRect();
|
||||||
|
|
||||||
|
if (clientX != null && clientY != null && rect) {
|
||||||
|
dragOffsetRef.current = {
|
||||||
|
x: clientX - rect.left,
|
||||||
|
y: clientY - rect.top,
|
||||||
|
};
|
||||||
|
} else {
|
||||||
|
dragOffsetRef.current = { x: 0, y: 0 };
|
||||||
|
}
|
||||||
|
|
||||||
|
const clamped = clampToVisible(data?.x ?? 0, data?.y ?? 0);
|
||||||
|
setDragPosition(clamped);
|
||||||
|
dragPositionRef.current = clamped;
|
||||||
|
},
|
||||||
|
[clampToVisible]
|
||||||
|
);
|
||||||
|
|
||||||
|
const handleDrag = useCallback(
|
||||||
|
(event) => {
|
||||||
|
const clientX = event.touches?.[0]?.clientX ?? event.clientX;
|
||||||
|
const clientY = event.touches?.[0]?.clientY ?? event.clientY;
|
||||||
|
if (clientX == null || clientY == null) return;
|
||||||
|
|
||||||
|
const nextX = clientX - (dragOffsetRef.current?.x ?? 0);
|
||||||
|
const nextY = clientY - (dragOffsetRef.current?.y ?? 0);
|
||||||
|
const clamped = clampToVisible(nextX, nextY);
|
||||||
|
setDragPosition(clamped);
|
||||||
|
dragPositionRef.current = clamped;
|
||||||
|
},
|
||||||
|
[clampToVisible]
|
||||||
|
);
|
||||||
|
|
||||||
|
const handleDragStop = useCallback(
|
||||||
|
(_, data) => {
|
||||||
|
const clamped = clampToVisible(data?.x ?? 0, data?.y ?? 0);
|
||||||
|
setDragPosition(clamped);
|
||||||
|
dragPositionRef.current = clamped;
|
||||||
|
},
|
||||||
|
[clampToVisible]
|
||||||
|
);
|
||||||
|
|
||||||
// If the floating video is hidden or no URL is selected, do not render
|
// If the floating video is hidden or no URL is selected, do not render
|
||||||
if (!isVisible || !streamUrl) {
|
if (!isVisible || !streamUrl) {
|
||||||
return null;
|
return null;
|
||||||
}
|
}
|
||||||
|
|
||||||
return (
|
return (
|
||||||
<Draggable nodeRef={videoContainerRef}>
|
<Draggable
|
||||||
|
nodeRef={videoContainerRef}
|
||||||
|
cancel=".floating-video-no-drag"
|
||||||
|
disabled={isResizing}
|
||||||
|
position={dragPosition || undefined}
|
||||||
|
defaultPosition={initialPositionRef.current || { x: 0, y: 0 }}
|
||||||
|
onStart={handleDragStart}
|
||||||
|
onDrag={handleDrag}
|
||||||
|
onStop={handleDragStop}
|
||||||
|
>
|
||||||
<div
|
<div
|
||||||
ref={videoContainerRef}
|
ref={videoContainerRef}
|
||||||
style={{
|
style={{
|
||||||
position: 'fixed',
|
position: 'fixed',
|
||||||
bottom: '20px',
|
top: 0,
|
||||||
right: '20px',
|
left: 0,
|
||||||
width: '320px',
|
width: `${videoSize.width}px`,
|
||||||
zIndex: 9999,
|
zIndex: 9999,
|
||||||
backgroundColor: '#333',
|
backgroundColor: '#333',
|
||||||
borderRadius: '8px',
|
borderRadius: '8px',
|
||||||
overflow: 'hidden',
|
overflow: 'visible',
|
||||||
boxShadow: '0 2px 10px rgba(0,0,0,0.7)',
|
boxShadow: '0 2px 10px rgba(0,0,0,0.7)',
|
||||||
}}
|
}}
|
||||||
>
|
>
|
||||||
|
|
@ -378,10 +760,12 @@ export default function FloatingVideo() {
|
||||||
<video
|
<video
|
||||||
ref={videoRef}
|
ref={videoRef}
|
||||||
controls
|
controls
|
||||||
|
className="floating-video-no-drag"
|
||||||
style={{
|
style={{
|
||||||
width: '100%',
|
width: '100%',
|
||||||
height: '180px',
|
height: `${videoSize.height}px`,
|
||||||
backgroundColor: '#000',
|
backgroundColor: '#000',
|
||||||
|
borderRadius: '0 0 8px 8px',
|
||||||
// Better controls styling for VOD
|
// Better controls styling for VOD
|
||||||
...(contentType === 'vod' && {
|
...(contentType === 'vod' && {
|
||||||
controlsList: 'nodownload',
|
controlsList: 'nodownload',
|
||||||
|
|
@ -468,6 +852,21 @@ export default function FloatingVideo() {
|
||||||
</Text>
|
</Text>
|
||||||
</Box>
|
</Box>
|
||||||
)}
|
)}
|
||||||
|
|
||||||
|
{/* Resize handles */}
|
||||||
|
{resizeHandles.map((handle) => (
|
||||||
|
<Box
|
||||||
|
key={handle.id}
|
||||||
|
className="floating-video-no-drag"
|
||||||
|
onMouseDown={(event) => startResize(event, handle)}
|
||||||
|
onTouchStart={(event) => startResize(event, handle)}
|
||||||
|
style={{
|
||||||
|
...resizeHandleBaseStyle,
|
||||||
|
...handle.style,
|
||||||
|
cursor: handle.cursor,
|
||||||
|
}}
|
||||||
|
/>
|
||||||
|
))}
|
||||||
</div>
|
</div>
|
||||||
</Draggable>
|
</Draggable>
|
||||||
);
|
);
|
||||||
|
|
|
||||||
|
|
@ -149,6 +149,9 @@ const RegexFormAndView = ({ profile = null, m3u, isOpen, onClose }) => {
|
||||||
}
|
}
|
||||||
|
|
||||||
resetForm();
|
resetForm();
|
||||||
|
// Reset local state to sync with formik reset
|
||||||
|
setSearchPattern('');
|
||||||
|
setReplacePattern('');
|
||||||
setSubmitting(false);
|
setSubmitting(false);
|
||||||
onClose();
|
onClose();
|
||||||
},
|
},
|
||||||
|
|
|
||||||
|
|
@ -1,8 +1,19 @@
|
||||||
// frontend/src/components/forms/SuperuserForm.js
|
// frontend/src/components/forms/SuperuserForm.js
|
||||||
import React, { useState } from 'react';
|
import React, { useState, useEffect } from 'react';
|
||||||
import { TextInput, Center, Button, Paper, Title, Stack } from '@mantine/core';
|
import {
|
||||||
|
TextInput,
|
||||||
|
Center,
|
||||||
|
Button,
|
||||||
|
Paper,
|
||||||
|
Title,
|
||||||
|
Stack,
|
||||||
|
Text,
|
||||||
|
Image,
|
||||||
|
Divider,
|
||||||
|
} from '@mantine/core';
|
||||||
import API from '../../api';
|
import API from '../../api';
|
||||||
import useAuthStore from '../../store/auth';
|
import useAuthStore from '../../store/auth';
|
||||||
|
import logo from '../../assets/logo.png';
|
||||||
|
|
||||||
function SuperuserForm() {
|
function SuperuserForm() {
|
||||||
const [formData, setFormData] = useState({
|
const [formData, setFormData] = useState({
|
||||||
|
|
@ -11,8 +22,16 @@ function SuperuserForm() {
|
||||||
email: '',
|
email: '',
|
||||||
});
|
});
|
||||||
const [error, setError] = useState('');
|
const [error, setError] = useState('');
|
||||||
|
const [version, setVersion] = useState(null);
|
||||||
const setSuperuserExists = useAuthStore((s) => s.setSuperuserExists);
|
const setSuperuserExists = useAuthStore((s) => s.setSuperuserExists);
|
||||||
|
|
||||||
|
useEffect(() => {
|
||||||
|
// Fetch version info
|
||||||
|
API.getVersion().then((data) => {
|
||||||
|
setVersion(data?.version);
|
||||||
|
});
|
||||||
|
}, []);
|
||||||
|
|
||||||
const handleChange = (e) => {
|
const handleChange = (e) => {
|
||||||
setFormData((prev) => ({
|
setFormData((prev) => ({
|
||||||
...prev,
|
...prev,
|
||||||
|
|
@ -46,11 +65,29 @@ function SuperuserForm() {
|
||||||
>
|
>
|
||||||
<Paper
|
<Paper
|
||||||
elevation={3}
|
elevation={3}
|
||||||
style={{ padding: 30, width: '100%', maxWidth: 400 }}
|
style={{
|
||||||
|
padding: 30,
|
||||||
|
width: '100%',
|
||||||
|
maxWidth: 500,
|
||||||
|
position: 'relative',
|
||||||
|
}}
|
||||||
>
|
>
|
||||||
<Title order={4} align="center">
|
<Stack align="center" spacing="lg">
|
||||||
Create your Super User Account
|
<Image
|
||||||
</Title>
|
src={logo}
|
||||||
|
alt="Dispatcharr Logo"
|
||||||
|
width={120}
|
||||||
|
height={120}
|
||||||
|
fit="contain"
|
||||||
|
/>
|
||||||
|
<Title order={2} align="center">
|
||||||
|
Dispatcharr
|
||||||
|
</Title>
|
||||||
|
<Text size="sm" color="dimmed" align="center">
|
||||||
|
Welcome! Create your Super User Account to get started.
|
||||||
|
</Text>
|
||||||
|
<Divider style={{ width: '100%' }} />
|
||||||
|
</Stack>
|
||||||
<form onSubmit={handleSubmit}>
|
<form onSubmit={handleSubmit}>
|
||||||
<Stack>
|
<Stack>
|
||||||
<TextInput
|
<TextInput
|
||||||
|
|
@ -77,11 +114,25 @@ function SuperuserForm() {
|
||||||
onChange={handleChange}
|
onChange={handleChange}
|
||||||
/>
|
/>
|
||||||
|
|
||||||
<Button type="submit" size="sm" sx={{ pt: 1 }}>
|
<Button type="submit" fullWidth>
|
||||||
Submit
|
Create Account
|
||||||
</Button>
|
</Button>
|
||||||
</Stack>
|
</Stack>
|
||||||
</form>
|
</form>
|
||||||
|
|
||||||
|
{version && (
|
||||||
|
<Text
|
||||||
|
size="xs"
|
||||||
|
color="dimmed"
|
||||||
|
style={{
|
||||||
|
position: 'absolute',
|
||||||
|
bottom: 6,
|
||||||
|
right: 30,
|
||||||
|
}}
|
||||||
|
>
|
||||||
|
v{version}
|
||||||
|
</Text>
|
||||||
|
)}
|
||||||
</Paper>
|
</Paper>
|
||||||
</Center>
|
</Center>
|
||||||
);
|
);
|
||||||
|
|
|
||||||
|
|
@@ -68,7 +68,7 @@ const epgUrlBase = `${window.location.protocol}//${window.location.host}/output/`;
 const hdhrUrlBase = `${window.location.protocol}//${window.location.host}/hdhr`;

 const ChannelEnabledSwitch = React.memo(
-  ({ rowId, selectedProfileId, selectedTableIds, setSelectedTableIds }) => {
+  ({ rowId, selectedProfileId, selectedTableIds }) => {
     // Directly extract the channels set once to avoid re-renders on every change.
     const isEnabled = useChannelsStore(
       useCallback(
@@ -79,20 +79,16 @@ const ChannelEnabledSwitch = React.memo(
       )
     );

-    const handleToggle = async () => {
+    const handleToggle = () => {
       if (selectedTableIds.length > 1) {
-        await API.updateProfileChannels(
+        API.updateProfileChannels(
           selectedTableIds,
           selectedProfileId,
           !isEnabled
         );
       } else {
-        await API.updateProfileChannel(rowId, selectedProfileId, !isEnabled);
+        API.updateProfileChannel(rowId, selectedProfileId, !isEnabled);
       }
-
-      setSelectedTableIds([]);
-
-      return API.requeryChannels();
     };

     return (
@@ -751,7 +747,6 @@ const ChannelsTable = ({}) => {
             rowId={row.original.id}
             selectedProfileId={selectedProfileId}
             selectedTableIds={table.getState().selectedTableIds}
-            setSelectedTableIds={table.setSelectedTableIds}
           />
         );
       },
@@ -191,6 +191,8 @@ const SettingsPage = () => {
     useState(false);
   const [netNetworkAccessConfirmCIDRs, setNetNetworkAccessConfirmCIDRs] =
     useState([]);

+  const [clientIpAddress, setClientIpAddress] = useState(null);
+
   const [proxySettingsSaved, setProxySettingsSaved] = useState(false);
   const [generalSettingsSaved, setGeneralSettingsSaved] = useState(false);
@@ -508,6 +510,9 @@ const SettingsPage = () => {
       return;
     }

+    // Store the client IP
+    setClientIpAddress(check.client_ip);
+
     // For now, only warn if we're blocking the UI
     const blockedAccess = check.UI;
     if (blockedAccess.length == 0) {
@@ -1056,6 +1061,10 @@ const SettingsPage = () => {
                 value: 'm3u_id',
                 label: 'M3U ID',
               },
+              {
+                value: 'group',
+                label: 'Group',
+              },
             ]}
             {...form.getInputProps('m3u-hash-key')}
             key={form.key('m3u-hash-key')}
@@ -1361,7 +1370,7 @@ Please ensure you have time to let this complete before proceeding.`}
         message={
           <>
             <Text>
-              Your client is not included in the allowed networks for the web
+              Your client {clientIpAddress && `(${clientIpAddress}) `}is not included in the allowed networks for the web
               UI. Are you sure you want to proceed?
             </Text>
@@ -89,7 +89,7 @@ const getStartDate = (uptime) => {
 };

 // Create a VOD Card component similar to ChannelCard
-const VODCard = ({ vodContent }) => {
+const VODCard = ({ vodContent, stopVODClient }) => {
   const [dateFormatSetting] = useLocalStorage('date-format', 'mdy');
   const dateFormat = dateFormatSetting === 'mdy' ? 'MM/DD' : 'DD/MM';
   const [isClientExpanded, setIsClientExpanded] = useState(false);
@@ -329,6 +329,19 @@ const VODCard = ({ vodContent }) => {
               </Center>
             </Tooltip>
           )}
+          {connection && stopVODClient && (
+            <Center>
+              <Tooltip label="Stop VOD Connection">
+                <ActionIcon
+                  variant="transparent"
+                  color="red.9"
+                  onClick={() => stopVODClient(connection.client_id)}
+                >
+                  <SquareX size="24" />
+                </ActionIcon>
+              </Tooltip>
+            </Center>
+          )}
         </Group>
       </Group>

@@ -468,8 +481,8 @@ const VODCard = ({ vodContent }) => {
                   size={16}
                   style={{
                     transform: isClientExpanded
-                      ? 'rotate(180deg)'
-                      : 'rotate(0deg)',
+                      ? 'rotate(0deg)'
+                      : 'rotate(180deg)',
                     transition: 'transform 0.2s',
                   }}
                 />
@@ -1297,6 +1310,12 @@ const ChannelsPage = () => {
     await API.stopClient(channelId, clientId);
   };

+  const stopVODClient = async (clientId) => {
+    await API.stopVODClient(clientId);
+    // Refresh VOD stats after stopping to update the UI
+    fetchVODStats();
+  };
+
   // Function to fetch channel stats from API
   const fetchChannelStats = useCallback(async () => {
     try {
@@ -1585,7 +1604,11 @@ const ChannelsPage = () => {
       );
     } else if (connection.type === 'vod') {
       return (
-        <VODCard key={connection.id} vodContent={connection.data} />
+        <VODCard
+          key={connection.id}
+          vodContent={connection.data}
+          stopVODClient={stopVODClient}
+        />
       );
     }
     return null;
@@ -1,32 +1,32 @@
-Django==5.2.4
-psycopg2-binary==2.9.10
-celery[redis]==5.5.3
-djangorestframework==3.16.0
-requests==2.32.4
-psutil==7.0.0
+Django==5.2.9
+psycopg2-binary==2.9.11
+celery[redis]==5.6.0
+djangorestframework==3.16.1
+requests==2.32.5
+psutil==7.1.3
 pillow
-drf-yasg>=1.20.0
+drf-yasg>=1.21.11
 streamlink
 python-vlc
 yt-dlp
-gevent==25.5.1
+gevent==25.9.1
 daphne
 uwsgi
 django-cors-headers
 djangorestframework-simplejwt
 m3u8
-rapidfuzz==3.13.0
+rapidfuzz==3.14.3
 regex # Required by transformers but also used for advanced regex features
 tzlocal

 # PyTorch dependencies (CPU only)
 --extra-index-url https://download.pytorch.org/whl/cpu/
-torch==2.7.1+cpu
+torch==2.9.1+cpu

 # ML/NLP dependencies
-sentence-transformers==5.1.0
+sentence-transformers==5.2.0
 channels
 channels-redis==4.3.0
 django-filter
 django-celery-beat
-lxml==6.0.0
+lxml==6.0.2
@@ -1,5 +1,5 @@
 """
 Dispatcharr version information.
 """
-__version__ = '0.14.0'  # Follow semantic versioning (MAJOR.MINOR.PATCH)
+__version__ = '0.15.1'  # Follow semantic versioning (MAJOR.MINOR.PATCH)
 __timestamp__ = None  # Set during CI/CD build process