Mirror of https://github.com/Dispatcharr/Dispatcharr.git, synced 2026-01-23 02:35:14 +00:00
Compare commits
308 commits
219 changed files with 29651 additions and 8909 deletions
@@ -31,3 +31,4 @@
LICENSE
README.md
data/
docker/data/
87  .github/workflows/base-image.yml  vendored

@@ -101,6 +101,28 @@ jobs:
          username: ${{ secrets.DOCKERHUB_USERNAME }}
          password: ${{ secrets.DOCKERHUB_TOKEN }}

      - name: Extract metadata for Docker
        id: meta
        uses: docker/metadata-action@v5
        with:
          images: |
            ghcr.io/${{ needs.prepare.outputs.repo_owner }}/${{ needs.prepare.outputs.repo_name }}
            docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${{ needs.prepare.outputs.repo_name }}
          labels: |
            org.opencontainers.image.title=${{ needs.prepare.outputs.repo_name }}
            org.opencontainers.image.description=Your ultimate IPTV & stream Management companion.
            org.opencontainers.image.url=https://github.com/${{ github.repository }}
            org.opencontainers.image.source=https://github.com/${{ github.repository }}
            org.opencontainers.image.version=${{ needs.prepare.outputs.branch_tag }}-${{ needs.prepare.outputs.timestamp }}
            org.opencontainers.image.created=${{ needs.prepare.outputs.timestamp }}
            org.opencontainers.image.revision=${{ github.sha }}
            org.opencontainers.image.licenses=See repository
            org.opencontainers.image.documentation=https://dispatcharr.github.io/Dispatcharr-Docs/
            org.opencontainers.image.vendor=${{ needs.prepare.outputs.repo_owner }}
            org.opencontainers.image.authors=${{ github.actor }}
            maintainer=${{ github.actor }}
            build_version=DispatcharrBase version: ${{ needs.prepare.outputs.branch_tag }}-${{ needs.prepare.outputs.timestamp }}

      - name: Build and push Docker base image
        uses: docker/build-push-action@v4
        with:

@@ -113,6 +135,7 @@ jobs:
            ghcr.io/${{ needs.prepare.outputs.repo_owner }}/${{ needs.prepare.outputs.repo_name }}:${{ needs.prepare.outputs.branch_tag }}-${{ needs.prepare.outputs.timestamp }}-${{ matrix.platform }}
            docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${{ needs.prepare.outputs.repo_name }}:${{ needs.prepare.outputs.branch_tag }}-${{ matrix.platform }}
            docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${{ needs.prepare.outputs.repo_name }}:${{ needs.prepare.outputs.branch_tag }}-${{ needs.prepare.outputs.timestamp }}-${{ matrix.platform }}
          labels: ${{ steps.meta.outputs.labels }}
          build-args: |
            REPO_OWNER=${{ needs.prepare.outputs.repo_owner }}
            REPO_NAME=${{ needs.prepare.outputs.repo_name }}

@@ -154,18 +177,74 @@ jobs:
          # GitHub Container Registry manifests
          # branch tag (e.g. base or base-dev)
          docker buildx imagetools create --tag ghcr.io/${OWNER}/${REPO}:${BRANCH_TAG} \
          docker buildx imagetools create \
            --annotation "index:org.opencontainers.image.title=${{ needs.prepare.outputs.repo_name }}" \
            --annotation "index:org.opencontainers.image.description=Your ultimate IPTV & stream Management companion." \
            --annotation "index:org.opencontainers.image.url=https://github.com/${{ github.repository }}" \
            --annotation "index:org.opencontainers.image.source=https://github.com/${{ github.repository }}" \
            --annotation "index:org.opencontainers.image.version=${BRANCH_TAG}-${TIMESTAMP}" \
            --annotation "index:org.opencontainers.image.created=${TIMESTAMP}" \
            --annotation "index:org.opencontainers.image.revision=${{ github.sha }}" \
            --annotation "index:org.opencontainers.image.licenses=See repository" \
            --annotation "index:org.opencontainers.image.documentation=https://dispatcharr.github.io/Dispatcharr-Docs/" \
            --annotation "index:org.opencontainers.image.vendor=${OWNER}" \
            --annotation "index:org.opencontainers.image.authors=${{ github.actor }}" \
            --annotation "index:maintainer=${{ github.actor }}" \
            --annotation "index:build_version=DispatcharrBase version: ${BRANCH_TAG}-${TIMESTAMP}" \
            --tag ghcr.io/${OWNER}/${REPO}:${BRANCH_TAG} \
            ghcr.io/${OWNER}/${REPO}:${BRANCH_TAG}-amd64 ghcr.io/${OWNER}/${REPO}:${BRANCH_TAG}-arm64

          # branch + timestamp tag
          docker buildx imagetools create --tag ghcr.io/${OWNER}/${REPO}:${BRANCH_TAG}-${TIMESTAMP} \
          docker buildx imagetools create \
            --annotation "index:org.opencontainers.image.title=${{ needs.prepare.outputs.repo_name }}" \
            --annotation "index:org.opencontainers.image.description=Your ultimate IPTV & stream Management companion." \
            --annotation "index:org.opencontainers.image.url=https://github.com/${{ github.repository }}" \
            --annotation "index:org.opencontainers.image.source=https://github.com/${{ github.repository }}" \
            --annotation "index:org.opencontainers.image.version=${BRANCH_TAG}-${TIMESTAMP}" \
            --annotation "index:org.opencontainers.image.created=${TIMESTAMP}" \
            --annotation "index:org.opencontainers.image.revision=${{ github.sha }}" \
            --annotation "index:org.opencontainers.image.licenses=See repository" \
            --annotation "index:org.opencontainers.image.documentation=https://dispatcharr.github.io/Dispatcharr-Docs/" \
            --annotation "index:org.opencontainers.image.vendor=${OWNER}" \
            --annotation "index:org.opencontainers.image.authors=${{ github.actor }}" \
            --annotation "index:maintainer=${{ github.actor }}" \
            --annotation "index:build_version=DispatcharrBase version: ${BRANCH_TAG}-${TIMESTAMP}" \
            --tag ghcr.io/${OWNER}/${REPO}:${BRANCH_TAG}-${TIMESTAMP} \
            ghcr.io/${OWNER}/${REPO}:${BRANCH_TAG}-${TIMESTAMP}-amd64 ghcr.io/${OWNER}/${REPO}:${BRANCH_TAG}-${TIMESTAMP}-arm64

          # Docker Hub manifests
          # branch tag (e.g. base or base-dev)
          docker buildx imagetools create --tag docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${REPO}:${BRANCH_TAG} \
          docker buildx imagetools create \
            --annotation "index:org.opencontainers.image.title=${{ needs.prepare.outputs.repo_name }}" \
            --annotation "index:org.opencontainers.image.description=Your ultimate IPTV & stream Management companion." \
            --annotation "index:org.opencontainers.image.url=https://github.com/${{ github.repository }}" \
            --annotation "index:org.opencontainers.image.source=https://github.com/${{ github.repository }}" \
            --annotation "index:org.opencontainers.image.version=${BRANCH_TAG}-${TIMESTAMP}" \
            --annotation "index:org.opencontainers.image.created=${TIMESTAMP}" \
            --annotation "index:org.opencontainers.image.revision=${{ github.sha }}" \
            --annotation "index:org.opencontainers.image.licenses=See repository" \
            --annotation "index:org.opencontainers.image.documentation=https://dispatcharr.github.io/Dispatcharr-Docs/" \
            --annotation "index:org.opencontainers.image.vendor=${OWNER}" \
            --annotation "index:org.opencontainers.image.authors=${{ github.actor }}" \
            --annotation "index:maintainer=${{ github.actor }}" \
            --annotation "index:build_version=DispatcharrBase version: ${BRANCH_TAG}-${TIMESTAMP}" \
            --tag docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${REPO}:${BRANCH_TAG} \
            docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${REPO}:${BRANCH_TAG}-amd64 docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${REPO}:${BRANCH_TAG}-arm64

          # branch + timestamp tag
          docker buildx imagetools create --tag docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${REPO}:${BRANCH_TAG}-${TIMESTAMP} \
          docker buildx imagetools create \
            --annotation "index:org.opencontainers.image.title=${{ needs.prepare.outputs.repo_name }}" \
            --annotation "index:org.opencontainers.image.description=Your ultimate IPTV & stream Management companion." \
            --annotation "index:org.opencontainers.image.url=https://github.com/${{ github.repository }}" \
            --annotation "index:org.opencontainers.image.source=https://github.com/${{ github.repository }}" \
            --annotation "index:org.opencontainers.image.version=${BRANCH_TAG}-${TIMESTAMP}" \
            --annotation "index:org.opencontainers.image.created=${TIMESTAMP}" \
            --annotation "index:org.opencontainers.image.revision=${{ github.sha }}" \
            --annotation "index:org.opencontainers.image.licenses=See repository" \
            --annotation "index:org.opencontainers.image.documentation=https://dispatcharr.github.io/Dispatcharr-Docs/" \
            --annotation "index:org.opencontainers.image.vendor=${OWNER}" \
            --annotation "index:org.opencontainers.image.authors=${{ github.actor }}" \
            --annotation "index:maintainer=${{ github.actor }}" \
            --annotation "index:build_version=DispatcharrBase version: ${BRANCH_TAG}-${TIMESTAMP}" \
            --tag docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${REPO}:${BRANCH_TAG}-${TIMESTAMP} \
            docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${REPO}:${BRANCH_TAG}-${TIMESTAMP}-amd64 docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${REPO}:${BRANCH_TAG}-${TIMESTAMP}-arm64
89  .github/workflows/ci.yml  vendored

@@ -3,6 +3,8 @@ name: CI Pipeline
on:
  push:
    branches: [dev]
    paths-ignore:
      - '**.md'
  pull_request:
    branches: [dev]
  workflow_dispatch:

@@ -117,7 +119,27 @@ jobs:
          username: ${{ secrets.DOCKERHUB_USERNAME }}
          password: ${{ secrets.DOCKERHUB_TOKEN }}

      # use metadata from the prepare job
      - name: Extract metadata for Docker
        id: meta
        uses: docker/metadata-action@v5
        with:
          images: |
            ghcr.io/${{ needs.prepare.outputs.repo_owner }}/${{ needs.prepare.outputs.repo_name }}
            docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${{ needs.prepare.outputs.repo_name }}
          labels: |
            org.opencontainers.image.title=${{ needs.prepare.outputs.repo_name }}
            org.opencontainers.image.description=Your ultimate IPTV & stream Management companion.
            org.opencontainers.image.url=https://github.com/${{ github.repository }}
            org.opencontainers.image.source=https://github.com/${{ github.repository }}
            org.opencontainers.image.version=${{ needs.prepare.outputs.version }}-${{ needs.prepare.outputs.timestamp }}
            org.opencontainers.image.created=${{ needs.prepare.outputs.timestamp }}
            org.opencontainers.image.revision=${{ github.sha }}
            org.opencontainers.image.licenses=See repository
            org.opencontainers.image.documentation=https://dispatcharr.github.io/Dispatcharr-Docs/
            org.opencontainers.image.vendor=${{ needs.prepare.outputs.repo_owner }}
            org.opencontainers.image.authors=${{ github.actor }}
            maintainer=${{ github.actor }}
            build_version=Dispatcharr version: ${{ needs.prepare.outputs.version }}-${{ needs.prepare.outputs.timestamp }}

      - name: Build and push Docker image
        uses: docker/build-push-action@v4

@@ -135,6 +157,7 @@ jobs:
            ghcr.io/${{ needs.prepare.outputs.repo_owner }}/${{ needs.prepare.outputs.repo_name }}:${{ needs.prepare.outputs.version }}-${{ needs.prepare.outputs.timestamp }}-${{ matrix.platform }}
            docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${{ needs.prepare.outputs.repo_name }}:${{ needs.prepare.outputs.branch_tag }}-${{ matrix.platform }}
            docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${{ needs.prepare.outputs.repo_name }}:${{ needs.prepare.outputs.version }}-${{ needs.prepare.outputs.timestamp }}-${{ matrix.platform }}
          labels: ${{ steps.meta.outputs.labels }}
          build-args: |
            REPO_OWNER=${{ needs.prepare.outputs.repo_owner }}
            REPO_NAME=${{ needs.prepare.outputs.repo_name }}

@@ -179,16 +202,72 @@ jobs:
          echo "Creating multi-arch manifest for ${OWNER}/${REPO}"

          # branch tag (e.g. latest or dev)
          docker buildx imagetools create --tag ghcr.io/${OWNER}/${REPO}:${BRANCH_TAG} \
          docker buildx imagetools create \
            --annotation "index:org.opencontainers.image.title=${{ needs.prepare.outputs.repo_name }}" \
            --annotation "index:org.opencontainers.image.description=Your ultimate IPTV & stream Management companion." \
            --annotation "index:org.opencontainers.image.url=https://github.com/${{ github.repository }}" \
            --annotation "index:org.opencontainers.image.source=https://github.com/${{ github.repository }}" \
            --annotation "index:org.opencontainers.image.version=${BRANCH_TAG}" \
            --annotation "index:org.opencontainers.image.created=${TIMESTAMP}" \
            --annotation "index:org.opencontainers.image.revision=${{ github.sha }}" \
            --annotation "index:org.opencontainers.image.licenses=See repository" \
            --annotation "index:org.opencontainers.image.documentation=https://dispatcharr.github.io/Dispatcharr-Docs/" \
            --annotation "index:org.opencontainers.image.vendor=${OWNER}" \
            --annotation "index:org.opencontainers.image.authors=${{ github.actor }}" \
            --annotation "index:maintainer=${{ github.actor }}" \
            --annotation "index:build_version=Dispatcharr version: ${VERSION}-${TIMESTAMP}" \
            --tag ghcr.io/${OWNER}/${REPO}:${BRANCH_TAG} \
            ghcr.io/${OWNER}/${REPO}:${BRANCH_TAG}-amd64 ghcr.io/${OWNER}/${REPO}:${BRANCH_TAG}-arm64

          # version + timestamp tag
          docker buildx imagetools create --tag ghcr.io/${OWNER}/${REPO}:${VERSION}-${TIMESTAMP} \
          docker buildx imagetools create \
            --annotation "index:org.opencontainers.image.title=${{ needs.prepare.outputs.repo_name }}" \
            --annotation "index:org.opencontainers.image.description=Your ultimate IPTV & stream Management companion." \
            --annotation "index:org.opencontainers.image.url=https://github.com/${{ github.repository }}" \
            --annotation "index:org.opencontainers.image.source=https://github.com/${{ github.repository }}" \
            --annotation "index:org.opencontainers.image.version=${VERSION}-${TIMESTAMP}" \
            --annotation "index:org.opencontainers.image.created=${TIMESTAMP}" \
            --annotation "index:org.opencontainers.image.revision=${{ github.sha }}" \
            --annotation "index:org.opencontainers.image.licenses=See repository" \
            --annotation "index:org.opencontainers.image.documentation=https://dispatcharr.github.io/Dispatcharr-Docs/" \
            --annotation "index:org.opencontainers.image.vendor=${OWNER}" \
            --annotation "index:org.opencontainers.image.authors=${{ github.actor }}" \
            --annotation "index:maintainer=${{ github.actor }}" \
            --annotation "index:build_version=Dispatcharr version: ${VERSION}-${TIMESTAMP}" \
            --tag ghcr.io/${OWNER}/${REPO}:${VERSION}-${TIMESTAMP} \
            ghcr.io/${OWNER}/${REPO}:${VERSION}-${TIMESTAMP}-amd64 ghcr.io/${OWNER}/${REPO}:${VERSION}-${TIMESTAMP}-arm64

          # also create Docker Hub manifests using the same username
          docker buildx imagetools create --tag docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${REPO}:${BRANCH_TAG} \
          docker buildx imagetools create \
            --annotation "index:org.opencontainers.image.title=${{ needs.prepare.outputs.repo_name }}" \
            --annotation "index:org.opencontainers.image.description=Your ultimate IPTV & stream Management companion." \
            --annotation "index:org.opencontainers.image.url=https://github.com/${{ github.repository }}" \
            --annotation "index:org.opencontainers.image.source=https://github.com/${{ github.repository }}" \
            --annotation "index:org.opencontainers.image.version=${BRANCH_TAG}" \
            --annotation "index:org.opencontainers.image.created=${TIMESTAMP}" \
            --annotation "index:org.opencontainers.image.revision=${{ github.sha }}" \
            --annotation "index:org.opencontainers.image.licenses=See repository" \
            --annotation "index:org.opencontainers.image.documentation=https://dispatcharr.github.io/Dispatcharr-Docs/" \
            --annotation "index:org.opencontainers.image.vendor=${OWNER}" \
            --annotation "index:org.opencontainers.image.authors=${{ github.actor }}" \
            --annotation "index:maintainer=${{ github.actor }}" \
            --annotation "index:build_version=Dispatcharr version: ${VERSION}-${TIMESTAMP}" \
            --tag docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${REPO}:${BRANCH_TAG} \
            docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${REPO}:${BRANCH_TAG}-amd64 docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${REPO}:${BRANCH_TAG}-arm64

          docker buildx imagetools create --tag docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${REPO}:${VERSION}-${TIMESTAMP} \
          docker buildx imagetools create \
            --annotation "index:org.opencontainers.image.title=${{ needs.prepare.outputs.repo_name }}" \
            --annotation "index:org.opencontainers.image.description=Your ultimate IPTV & stream Management companion." \
            --annotation "index:org.opencontainers.image.url=https://github.com/${{ github.repository }}" \
            --annotation "index:org.opencontainers.image.source=https://github.com/${{ github.repository }}" \
            --annotation "index:org.opencontainers.image.version=${VERSION}-${TIMESTAMP}" \
            --annotation "index:org.opencontainers.image.created=${TIMESTAMP}" \
            --annotation "index:org.opencontainers.image.revision=${{ github.sha }}" \
            --annotation "index:org.opencontainers.image.licenses=See repository" \
            --annotation "index:org.opencontainers.image.documentation=https://dispatcharr.github.io/Dispatcharr-Docs/" \
            --annotation "index:org.opencontainers.image.vendor=${OWNER}" \
            --annotation "index:org.opencontainers.image.authors=${{ github.actor }}" \
            --annotation "index:maintainer=${{ github.actor }}" \
            --annotation "index:build_version=Dispatcharr version: ${VERSION}-${TIMESTAMP}" \
            --tag docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${REPO}:${VERSION}-${TIMESTAMP} \
            docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${REPO}:${VERSION}-${TIMESTAMP}-amd64 docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${REPO}:${VERSION}-${TIMESTAMP}-arm64
41  .github/workflows/frontend-tests.yml  vendored  Normal file

@@ -0,0 +1,41 @@
name: Frontend Tests

on:
  push:
    branches: [main, dev]
    paths:
      - 'frontend/**'
      - '.github/workflows/frontend-tests.yml'
  pull_request:
    branches: [main, dev]
    paths:
      - 'frontend/**'
      - '.github/workflows/frontend-tests.yml'

jobs:
  test:
    runs-on: ubuntu-latest

    defaults:
      run:
        working-directory: ./frontend

    steps:
      - name: Checkout code
        uses: actions/checkout@v4

      - name: Setup Node.js
        uses: actions/setup-node@v4
        with:
          node-version: '24'
          cache: 'npm'
          cache-dependency-path: './frontend/package-lock.json'

      - name: Install dependencies
        run: npm ci

      # - name: Run linter
      #   run: npm run lint

      - name: Run tests
        run: npm test
83  .github/workflows/release.yml  vendored

@@ -25,6 +25,7 @@ jobs:
      new_version: ${{ steps.update_version.outputs.new_version }}
      repo_owner: ${{ steps.meta.outputs.repo_owner }}
      repo_name: ${{ steps.meta.outputs.repo_name }}
      timestamp: ${{ steps.timestamp.outputs.timestamp }}
    steps:
      - uses: actions/checkout@v3
        with:

@@ -43,6 +44,10 @@ jobs:
          NEW_VERSION=$(python -c "import version; print(f'{version.__version__}')")
          echo "new_version=${NEW_VERSION}" >> $GITHUB_OUTPUT

      - name: Update Changelog
        run: |
          python scripts/update_changelog.py ${{ steps.update_version.outputs.new_version }}

      - name: Set repository metadata
        id: meta
        run: |

@@ -52,9 +57,15 @@ jobs:
          REPO_NAME=$(echo "${{ github.repository }}" | cut -d '/' -f 2 | tr '[:upper:]' '[:lower:]')
          echo "repo_name=${REPO_NAME}" >> $GITHUB_OUTPUT

      - name: Generate timestamp for build
        id: timestamp
        run: |
          TIMESTAMP=$(date -u +'%Y%m%d%H%M%S')
          echo "timestamp=${TIMESTAMP}" >> $GITHUB_OUTPUT

      - name: Commit and Tag
        run: |
          git add version.py
          git add version.py CHANGELOG.md
          git commit -m "Release v${{ steps.update_version.outputs.new_version }}"
          git tag -a "v${{ steps.update_version.outputs.new_version }}" -m "Release v${{ steps.update_version.outputs.new_version }}"
          git push origin main --tags

@@ -100,6 +111,28 @@ jobs:
          username: ${{ secrets.DOCKERHUB_USERNAME }}
          password: ${{ secrets.DOCKERHUB_TOKEN }}

      - name: Extract metadata for Docker
        id: meta
        uses: docker/metadata-action@v5
        with:
          images: |
            ghcr.io/${{ needs.prepare.outputs.repo_owner }}/${{ needs.prepare.outputs.repo_name }}
            docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${{ needs.prepare.outputs.repo_name }}
          labels: |
            org.opencontainers.image.title=${{ needs.prepare.outputs.repo_name }}
            org.opencontainers.image.description=Your ultimate IPTV & stream Management companion.
            org.opencontainers.image.url=https://github.com/${{ github.repository }}
            org.opencontainers.image.source=https://github.com/${{ github.repository }}
            org.opencontainers.image.version=${{ needs.prepare.outputs.new_version }}
            org.opencontainers.image.created=${{ needs.prepare.outputs.timestamp }}
            org.opencontainers.image.revision=${{ github.sha }}
            org.opencontainers.image.licenses=See repository
            org.opencontainers.image.documentation=https://dispatcharr.github.io/Dispatcharr-Docs/
            org.opencontainers.image.vendor=${{ needs.prepare.outputs.repo_owner }}
            org.opencontainers.image.authors=${{ github.actor }}
            maintainer=${{ github.actor }}
            build_version=Dispatcharr version: ${{ needs.prepare.outputs.new_version }} Build date: ${{ needs.prepare.outputs.timestamp }}

      - name: Build and push Docker image
        uses: docker/build-push-action@v4
        with:

@@ -111,6 +144,7 @@ jobs:
            ghcr.io/${{ needs.prepare.outputs.repo_owner }}/${{ needs.prepare.outputs.repo_name }}:${{ needs.prepare.outputs.new_version }}-${{ matrix.platform }}
            docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${{ needs.prepare.outputs.repo_name }}:latest-${{ matrix.platform }}
            docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${{ needs.prepare.outputs.repo_name }}:${{ needs.prepare.outputs.new_version }}-${{ matrix.platform }}
          labels: ${{ steps.meta.outputs.labels }}
          build-args: |
            REPO_OWNER=${{ needs.prepare.outputs.repo_owner }}
            REPO_NAME=${{ needs.prepare.outputs.repo_name }}

@@ -145,25 +179,48 @@ jobs:
          OWNER=${{ needs.prepare.outputs.repo_owner }}
          REPO=${{ needs.prepare.outputs.repo_name }}
          VERSION=${{ needs.prepare.outputs.new_version }}
          TIMESTAMP=${{ needs.prepare.outputs.timestamp }}

          echo "Creating multi-arch manifest for ${OWNER}/${REPO}"

          # GitHub Container Registry manifests
          # latest tag
          docker buildx imagetools create --tag ghcr.io/${OWNER}/${REPO}:latest \
            ghcr.io/${OWNER}/${REPO}:latest-amd64 ghcr.io/${OWNER}/${REPO}:latest-arm64

          # version tag
          docker buildx imagetools create --tag ghcr.io/${OWNER}/${REPO}:${VERSION} \
          # Create one manifest with both latest and version tags
          docker buildx imagetools create \
            --annotation "index:org.opencontainers.image.title=${{ needs.prepare.outputs.repo_name }}" \
            --annotation "index:org.opencontainers.image.description=Your ultimate IPTV & stream Management companion." \
            --annotation "index:org.opencontainers.image.url=https://github.com/${{ github.repository }}" \
            --annotation "index:org.opencontainers.image.source=https://github.com/${{ github.repository }}" \
            --annotation "index:org.opencontainers.image.version=${VERSION}" \
            --annotation "index:org.opencontainers.image.created=${TIMESTAMP}" \
            --annotation "index:org.opencontainers.image.revision=${{ github.sha }}" \
            --annotation "index:org.opencontainers.image.licenses=See repository" \
            --annotation "index:org.opencontainers.image.documentation=https://dispatcharr.github.io/Dispatcharr-Docs/" \
            --annotation "index:org.opencontainers.image.vendor=${OWNER}" \
            --annotation "index:org.opencontainers.image.authors=${{ github.actor }}" \
            --annotation "index:maintainer=${{ github.actor }}" \
            --annotation "index:build_version=Dispatcharr version: ${VERSION} Build date: ${TIMESTAMP}" \
            --tag ghcr.io/${OWNER}/${REPO}:latest \
            --tag ghcr.io/${OWNER}/${REPO}:${VERSION} \
            ghcr.io/${OWNER}/${REPO}:${VERSION}-amd64 ghcr.io/${OWNER}/${REPO}:${VERSION}-arm64

          # Docker Hub manifests
          # latest tag
          docker buildx imagetools create --tag docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${REPO}:latest \
            docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${REPO}:latest-amd64 docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${REPO}:latest-arm64

          # version tag
          docker buildx imagetools create --tag docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${REPO}:${VERSION} \
          # Create one manifest with both latest and version tags
          docker buildx imagetools create \
            --annotation "index:org.opencontainers.image.title=${{ needs.prepare.outputs.repo_name }}" \
            --annotation "index:org.opencontainers.image.description=Your ultimate IPTV & stream Management companion." \
            --annotation "index:org.opencontainers.image.url=https://github.com/${{ github.repository }}" \
            --annotation "index:org.opencontainers.image.source=https://github.com/${{ github.repository }}" \
            --annotation "index:org.opencontainers.image.version=${VERSION}" \
            --annotation "index:org.opencontainers.image.created=${TIMESTAMP}" \
            --annotation "index:org.opencontainers.image.revision=${{ github.sha }}" \
            --annotation "index:org.opencontainers.image.licenses=See repository" \
            --annotation "index:org.opencontainers.image.documentation=https://dispatcharr.github.io/Dispatcharr-Docs/" \
            --annotation "index:org.opencontainers.image.vendor=${OWNER}" \
            --annotation "index:org.opencontainers.image.authors=${{ github.actor }}" \
            --annotation "index:maintainer=${{ github.actor }}" \
            --annotation "index:build_version=Dispatcharr version: ${VERSION} Build date: ${TIMESTAMP}" \
            --tag docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${REPO}:latest \
            --tag docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${REPO}:${VERSION} \
            docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${REPO}:${VERSION}-amd64 docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${REPO}:${VERSION}-arm64

  create-release:
3  .gitignore  vendored

@@ -18,4 +18,5 @@ dump.rdb
debugpy*
uwsgi.sock
package-lock.json
models
models
.idea
1014  CHANGELOG.md  Normal file
File diff suppressed because it is too large
@@ -20,30 +20,88 @@ class TokenObtainPairView(TokenObtainPairView):
    def post(self, request, *args, **kwargs):
        # Custom logic here
        if not network_access_allowed(request, "UI"):
            # Log blocked login attempt due to network restrictions
            from core.utils import log_system_event
            username = request.data.get("username", 'unknown')
            client_ip = request.META.get('REMOTE_ADDR', 'unknown')
            user_agent = request.META.get('HTTP_USER_AGENT', 'unknown')
            log_system_event(
                event_type='login_failed',
                user=username,
                client_ip=client_ip,
                user_agent=user_agent,
                reason='Network access denied',
            )
            return Response({"error": "Forbidden"}, status=status.HTTP_403_FORBIDDEN)

        # Get the response from the parent class first
        response = super().post(request, *args, **kwargs)
        username = request.data.get("username")

        # If login was successful, update last_login
        if response.status_code == 200:
            username = request.data.get("username")
            if username:
                from django.utils import timezone
                try:
                    user = User.objects.get(username=username)
                    user.last_login = timezone.now()
                    user.save(update_fields=['last_login'])
                except User.DoesNotExist:
                    pass  # User doesn't exist, but login somehow succeeded
        # Log login attempt
        from core.utils import log_system_event
        client_ip = request.META.get('REMOTE_ADDR', 'unknown')
        user_agent = request.META.get('HTTP_USER_AGENT', 'unknown')

        return response
        try:
            response = super().post(request, *args, **kwargs)

            # If login was successful, update last_login and log success
            if response.status_code == 200:
                if username:
                    from django.utils import timezone
                    try:
                        user = User.objects.get(username=username)
                        user.last_login = timezone.now()
                        user.save(update_fields=['last_login'])

                        # Log successful login
                        log_system_event(
                            event_type='login_success',
                            user=username,
                            client_ip=client_ip,
                            user_agent=user_agent,
                        )
                    except User.DoesNotExist:
                        pass  # User doesn't exist, but login somehow succeeded
            else:
                # Log failed login attempt
                log_system_event(
                    event_type='login_failed',
                    user=username or 'unknown',
                    client_ip=client_ip,
                    user_agent=user_agent,
                    reason='Invalid credentials',
                )

            return response

        except Exception as e:
            # If parent class raises an exception (e.g., validation error), log failed attempt
            log_system_event(
                event_type='login_failed',
                user=username or 'unknown',
                client_ip=client_ip,
                user_agent=user_agent,
                reason=f'Authentication error: {str(e)[:100]}',
            )
            raise  # Re-raise the exception to maintain normal error flow


class TokenRefreshView(TokenRefreshView):
    def post(self, request, *args, **kwargs):
        # Custom logic here
        if not network_access_allowed(request, "UI"):
            # Log blocked token refresh attempt due to network restrictions
            from core.utils import log_system_event
            client_ip = request.META.get('REMOTE_ADDR', 'unknown')
            user_agent = request.META.get('HTTP_USER_AGENT', 'unknown')
            log_system_event(
                event_type='login_failed',
                user='token_refresh',
                client_ip=client_ip,
                user_agent=user_agent,
                reason='Network access denied (token refresh)',
            )
            return Response({"error": "Unauthorized"}, status=status.HTTP_403_FORBIDDEN)

        return super().post(request, *args, **kwargs)

@@ -80,6 +138,15 @@ def initialize_superuser(request):
class AuthViewSet(viewsets.ViewSet):
    """Handles user login and logout"""

    def get_permissions(self):
        """
        Login doesn't require auth, but logout does
        """
        if self.action == 'logout':
            from rest_framework.permissions import IsAuthenticated
            return [IsAuthenticated()]
        return []

    @swagger_auto_schema(
        operation_description="Authenticate and log in a user",
        request_body=openapi.Schema(

@@ -100,6 +167,11 @@ class AuthViewSet(viewsets.ViewSet):
        password = request.data.get("password")
        user = authenticate(request, username=username, password=password)

        # Get client info for logging
        from core.utils import log_system_event
        client_ip = request.META.get('REMOTE_ADDR', 'unknown')
        user_agent = request.META.get('HTTP_USER_AGENT', 'unknown')

        if user:
            login(request, user)
            # Update last_login timestamp

@@ -107,6 +179,14 @@ class AuthViewSet(viewsets.ViewSet):
            user.last_login = timezone.now()
            user.save(update_fields=['last_login'])

            # Log successful login
            log_system_event(
                event_type='login_success',
                user=username,
                client_ip=client_ip,
                user_agent=user_agent,
            )

            return Response(
                {
                    "message": "Login successful",

@@ -118,6 +198,15 @@ class AuthViewSet(viewsets.ViewSet):
                    },
                }
            )

        # Log failed login attempt
        log_system_event(
            event_type='login_failed',
            user=username or 'unknown',
            client_ip=client_ip,
            user_agent=user_agent,
            reason='Invalid credentials',
        )
        return Response({"error": "Invalid credentials"}, status=400)

    @swagger_auto_schema(

@@ -126,6 +215,19 @@ class AuthViewSet(viewsets.ViewSet):
    )
    def logout(self, request):
        """Logs out the authenticated user"""
        # Log logout event before actually logging out
        from core.utils import log_system_event
        username = request.user.username if request.user and request.user.is_authenticated else 'unknown'
        client_ip = request.META.get('REMOTE_ADDR', 'unknown')
        user_agent = request.META.get('HTTP_USER_AGENT', 'unknown')

        log_system_event(
            event_type='logout',
            user=username,
            client_ip=client_ip,
            user_agent=user_agent,
        )

        logout(request)
        return Response({"message": "Logout successful"})
@@ -27,6 +27,7 @@ urlpatterns = [
    path('core/', include(('core.api_urls', 'core'), namespace='core')),
    path('plugins/', include(('apps.plugins.api_urls', 'plugins'), namespace='plugins')),
    path('vod/', include(('apps.vod.api_urls', 'vod'), namespace='vod')),
    path('backups/', include(('apps.backups.api_urls', 'backups'), namespace='backups')),
    # path('output/', include(('apps.output.api_urls', 'output'), namespace='output')),
    #path('player/', include(('apps.player.api_urls', 'player'), namespace='player')),
    #path('settings/', include(('apps.settings.api_urls', 'settings'), namespace='settings')),
0  apps/backups/__init__.py  Normal file
18  apps/backups/api_urls.py  Normal file

@@ -0,0 +1,18 @@
from django.urls import path

from . import api_views

app_name = "backups"

urlpatterns = [
    path("", api_views.list_backups, name="backup-list"),
    path("create/", api_views.create_backup, name="backup-create"),
    path("upload/", api_views.upload_backup, name="backup-upload"),
    path("schedule/", api_views.get_schedule, name="backup-schedule-get"),
    path("schedule/update/", api_views.update_schedule, name="backup-schedule-update"),
    path("status/<str:task_id>/", api_views.backup_status, name="backup-status"),
    path("<str:filename>/download-token/", api_views.get_download_token, name="backup-download-token"),
    path("<str:filename>/download/", api_views.download_backup, name="backup-download"),
    path("<str:filename>/delete/", api_views.delete_backup, name="backup-delete"),
    path("<str:filename>/restore/", api_views.restore_backup, name="backup-restore"),
]
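The download routes above are meant to be used in two steps: an admin-authenticated request fetches a signed token, and the actual download is then made with that token as a query parameter so no Authorization header (and no CORS preflight) is needed. A minimal client sketch, assuming the router is mounted under /api/backups/ and that admin requests use a JWT bearer token; the host, path prefix, and auth header are illustrative assumptions, not taken from this diff:

import requests  # illustrative HTTP client; any client works

BASE = "http://dispatcharr.local/api/backups"  # assumed mount point for apps.backups.api_urls
HEADERS = {"Authorization": "Bearer <admin-jwt>"}  # assumed admin auth

def download_backup(filename: str, dest: str) -> None:
    # Step 1: admin-authenticated request for a signed download token.
    token = requests.get(
        f"{BASE}/{filename}/download-token/", headers=HEADERS, timeout=30
    ).json()["token"]

    # Step 2: plain GET with ?token=...; no auth header needed, so the file
    # can be streamed without triggering a CORS preflight.
    with requests.get(
        f"{BASE}/{filename}/download/", params={"token": token}, stream=True, timeout=300
    ) as resp:
        resp.raise_for_status()
        with open(dest, "wb") as out:
            for chunk in resp.iter_content(chunk_size=2 * 1024 * 1024):
                out.write(chunk)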
364  apps/backups/api_views.py  Normal file

@@ -0,0 +1,364 @@
import hashlib
import hmac
import logging
import os
from pathlib import Path

from celery.result import AsyncResult
from django.conf import settings
from django.http import HttpResponse, StreamingHttpResponse, Http404
from rest_framework import status
from rest_framework.decorators import api_view, permission_classes, parser_classes
from rest_framework.permissions import IsAdminUser, AllowAny
from rest_framework.parsers import MultiPartParser, FormParser
from rest_framework.response import Response

from . import services
from .tasks import create_backup_task, restore_backup_task
from .scheduler import get_schedule_settings, update_schedule_settings

logger = logging.getLogger(__name__)


def _generate_task_token(task_id: str) -> str:
    """Generate a signed token for task status access without auth."""
    secret = settings.SECRET_KEY.encode()
    return hmac.new(secret, task_id.encode(), hashlib.sha256).hexdigest()[:32]


def _verify_task_token(task_id: str, token: str) -> bool:
    """Verify a task token is valid."""
    expected = _generate_task_token(task_id)
    return hmac.compare_digest(expected, token)


@api_view(["GET"])
@permission_classes([IsAdminUser])
def list_backups(request):
    """List all available backup files."""
    try:
        backups = services.list_backups()
        return Response(backups, status=status.HTTP_200_OK)
    except Exception as e:
        return Response(
            {"detail": f"Failed to list backups: {str(e)}"},
            status=status.HTTP_500_INTERNAL_SERVER_ERROR,
        )


@api_view(["POST"])
@permission_classes([IsAdminUser])
def create_backup(request):
    """Create a new backup (async via Celery)."""
    try:
        task = create_backup_task.delay()
        return Response(
            {
                "detail": "Backup started",
                "task_id": task.id,
                "task_token": _generate_task_token(task.id),
            },
            status=status.HTTP_202_ACCEPTED,
        )
    except Exception as e:
        return Response(
            {"detail": f"Failed to start backup: {str(e)}"},
            status=status.HTTP_500_INTERNAL_SERVER_ERROR,
        )


@api_view(["GET"])
@permission_classes([AllowAny])
def backup_status(request, task_id):
    """Check the status of a backup/restore task.

    Requires either:
    - Valid admin authentication, OR
    - Valid task_token query parameter
    """
    # Check for token-based auth (for restore when session is invalidated)
    token = request.query_params.get("token")
    if token:
        if not _verify_task_token(task_id, token):
            return Response(
                {"detail": "Invalid task token"},
                status=status.HTTP_403_FORBIDDEN,
            )
    else:
        # Fall back to admin auth check
        if not request.user.is_authenticated or not request.user.is_staff:
            return Response(
                {"detail": "Authentication required"},
                status=status.HTTP_401_UNAUTHORIZED,
            )

    try:
        result = AsyncResult(task_id)

        if result.ready():
            task_result = result.get()
            if task_result.get("status") == "completed":
                return Response({
                    "state": "completed",
                    "result": task_result,
                })
            else:
                return Response({
                    "state": "failed",
                    "error": task_result.get("error", "Unknown error"),
                })
        elif result.failed():
            return Response({
                "state": "failed",
                "error": str(result.result),
            })
        else:
            return Response({
                "state": result.state.lower(),
            })
    except Exception as e:
        return Response(
            {"detail": f"Failed to get task status: {str(e)}"},
            status=status.HTTP_500_INTERNAL_SERVER_ERROR,
        )


@api_view(["GET"])
@permission_classes([IsAdminUser])
def get_download_token(request, filename):
    """Get a signed token for downloading a backup file."""
    try:
        # Security: prevent path traversal
        if ".." in filename or "/" in filename or "\\" in filename:
            raise Http404("Invalid filename")

        backup_dir = services.get_backup_dir()
        backup_file = backup_dir / filename

        if not backup_file.exists():
            raise Http404("Backup file not found")

        token = _generate_task_token(filename)
        return Response({"token": token})
    except Http404:
        raise
    except Exception as e:
        return Response(
            {"detail": f"Failed to generate token: {str(e)}"},
            status=status.HTTP_500_INTERNAL_SERVER_ERROR,
        )


@api_view(["GET"])
@permission_classes([AllowAny])
def download_backup(request, filename):
    """Download a backup file.

    Requires either:
    - Valid admin authentication, OR
    - Valid download_token query parameter
    """
    # Check for token-based auth (avoids CORS preflight issues)
    token = request.query_params.get("token")
    if token:
        if not _verify_task_token(filename, token):
            return Response(
                {"detail": "Invalid download token"},
                status=status.HTTP_403_FORBIDDEN,
            )
    else:
        # Fall back to admin auth check
        if not request.user.is_authenticated or not request.user.is_staff:
            return Response(
                {"detail": "Authentication required"},
                status=status.HTTP_401_UNAUTHORIZED,
            )

    try:
        # Security: prevent path traversal by checking for suspicious characters
        if ".." in filename or "/" in filename or "\\" in filename:
            raise Http404("Invalid filename")

        backup_dir = services.get_backup_dir()
        backup_file = (backup_dir / filename).resolve()

        # Security: ensure the resolved path is still within backup_dir
        if not str(backup_file).startswith(str(backup_dir.resolve())):
            raise Http404("Invalid filename")

        if not backup_file.exists() or not backup_file.is_file():
            raise Http404("Backup file not found")

        file_size = backup_file.stat().st_size

        # Use X-Accel-Redirect for nginx (AIO container) - nginx serves file directly
        # Fall back to streaming for non-nginx deployments
        use_nginx_accel = os.environ.get("USE_NGINX_ACCEL", "").lower() == "true"
        logger.info(f"[DOWNLOAD] File: {filename}, Size: {file_size}, USE_NGINX_ACCEL: {use_nginx_accel}")

        if use_nginx_accel:
            # X-Accel-Redirect: Django returns immediately, nginx serves file
            logger.info(f"[DOWNLOAD] Using X-Accel-Redirect: /protected-backups/{filename}")
            response = HttpResponse()
            response["X-Accel-Redirect"] = f"/protected-backups/{filename}"
            response["Content-Type"] = "application/zip"
            response["Content-Length"] = file_size
            response["Content-Disposition"] = f'attachment; filename="{filename}"'
            return response
        else:
            # Streaming fallback for non-nginx deployments
            logger.info(f"[DOWNLOAD] Using streaming fallback (no nginx)")
            def file_iterator(file_path, chunk_size=2 * 1024 * 1024):
                with open(file_path, "rb") as f:
                    while chunk := f.read(chunk_size):
                        yield chunk

            response = StreamingHttpResponse(
                file_iterator(backup_file),
                content_type="application/zip",
            )
            response["Content-Length"] = file_size
            response["Content-Disposition"] = f'attachment; filename="{filename}"'
            return response
    except Http404:
        raise
    except Exception as e:
        return Response(
            {"detail": f"Download failed: {str(e)}"},
            status=status.HTTP_500_INTERNAL_SERVER_ERROR,
        )


@api_view(["DELETE"])
@permission_classes([IsAdminUser])
def delete_backup(request, filename):
    """Delete a backup file."""
    try:
        # Security: prevent path traversal
        if ".." in filename or "/" in filename or "\\" in filename:
            raise Http404("Invalid filename")

        services.delete_backup(filename)
        return Response(
            {"detail": "Backup deleted successfully"},
            status=status.HTTP_204_NO_CONTENT,
        )
    except FileNotFoundError:
        raise Http404("Backup file not found")
    except Exception as e:
        return Response(
            {"detail": f"Delete failed: {str(e)}"},
            status=status.HTTP_500_INTERNAL_SERVER_ERROR,
        )


@api_view(["POST"])
@permission_classes([IsAdminUser])
@parser_classes([MultiPartParser, FormParser])
def upload_backup(request):
    """Upload a backup file for restoration."""
    uploaded = request.FILES.get("file")
    if not uploaded:
        return Response(
            {"detail": "No file uploaded"},
            status=status.HTTP_400_BAD_REQUEST,
        )

    try:
        backup_dir = services.get_backup_dir()
        filename = uploaded.name or "uploaded-backup.zip"

        # Ensure unique filename
        backup_file = backup_dir / filename
        counter = 1
        while backup_file.exists():
            name_parts = filename.rsplit(".", 1)
            if len(name_parts) == 2:
                backup_file = backup_dir / f"{name_parts[0]}-{counter}.{name_parts[1]}"
            else:
                backup_file = backup_dir / f"{filename}-{counter}"
            counter += 1

        # Save uploaded file
        with backup_file.open("wb") as f:
            for chunk in uploaded.chunks():
                f.write(chunk)

        return Response(
            {
                "detail": "Backup uploaded successfully",
                "filename": backup_file.name,
            },
            status=status.HTTP_201_CREATED,
        )
    except Exception as e:
        return Response(
            {"detail": f"Upload failed: {str(e)}"},
            status=status.HTTP_500_INTERNAL_SERVER_ERROR,
        )


@api_view(["POST"])
@permission_classes([IsAdminUser])
def restore_backup(request, filename):
    """Restore from a backup file (async via Celery). WARNING: This will flush the database!"""
    try:
        # Security: prevent path traversal
        if ".." in filename or "/" in filename or "\\" in filename:
            raise Http404("Invalid filename")

        backup_dir = services.get_backup_dir()
        backup_file = backup_dir / filename

        if not backup_file.exists():
            raise Http404("Backup file not found")

        task = restore_backup_task.delay(filename)
        return Response(
            {
                "detail": "Restore started",
                "task_id": task.id,
                "task_token": _generate_task_token(task.id),
            },
            status=status.HTTP_202_ACCEPTED,
        )
    except Http404:
        raise
    except Exception as e:
        return Response(
            {"detail": f"Failed to start restore: {str(e)}"},
            status=status.HTTP_500_INTERNAL_SERVER_ERROR,
        )


@api_view(["GET"])
@permission_classes([IsAdminUser])
def get_schedule(request):
    """Get backup schedule settings."""
    try:
        settings = get_schedule_settings()
        return Response(settings)
    except Exception as e:
        return Response(
            {"detail": f"Failed to get schedule: {str(e)}"},
            status=status.HTTP_500_INTERNAL_SERVER_ERROR,
        )


@api_view(["PUT"])
@permission_classes([IsAdminUser])
def update_schedule(request):
    """Update backup schedule settings."""
    try:
        settings = update_schedule_settings(request.data)
        return Response(settings)
    except ValueError as e:
        return Response(
            {"detail": str(e)},
            status=status.HTTP_400_BAD_REQUEST,
        )
    except Exception as e:
        return Response(
            {"detail": f"Failed to update schedule: {str(e)}"},
            status=status.HTTP_500_INTERNAL_SERVER_ERROR,
        )
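create_backup and restore_backup return a Celery task id plus an HMAC-signed task_token, and backup_status accepts that token as ?token=... so progress can still be polled after a restore has invalidated the admin session. A rough polling sketch under the same assumptions as the earlier client example (base URL and bearer auth are illustrative, not taken from this diff):

import time
import requests  # illustrative HTTP client

BASE = "http://dispatcharr.local/api/backups"  # assumed mount point
HEADERS = {"Authorization": "Bearer <admin-jwt>"}  # assumed admin auth

def run_backup_and_wait(poll_seconds: int = 5) -> dict:
    # Kick off the async backup; the response carries task_id and task_token.
    started = requests.post(f"{BASE}/create/", headers=HEADERS, timeout=30).json()
    task_id, task_token = started["task_id"], started["task_token"]

    # Poll backup_status with the signed token; no session or admin auth needed.
    while True:
        state = requests.get(
            f"{BASE}/status/{task_id}/", params={"token": task_token}, timeout=30
        ).json()
        if state["state"] == "completed":
            return state["result"]
        if state["state"] == "failed":
            raise RuntimeError(state.get("error", "Unknown error"))
        time.sleep(poll_seconds)  # task is still pending/started on the Celery side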
7  apps/backups/apps.py  Normal file

@@ -0,0 +1,7 @@
from django.apps import AppConfig


class BackupsConfig(AppConfig):
    default_auto_field = "django.db.models.BigAutoField"
    name = "apps.backups"
    verbose_name = "Backups"
0  apps/backups/migrations/__init__.py  Normal file
0  apps/backups/models.py  Normal file
202  apps/backups/scheduler.py  Normal file
@@ -0,0 +1,202 @@
import json
import logging

from django_celery_beat.models import PeriodicTask, CrontabSchedule

from core.models import CoreSettings

logger = logging.getLogger(__name__)

BACKUP_SCHEDULE_TASK_NAME = "backup-scheduled-task"

DEFAULTS = {
    "schedule_enabled": True,
    "schedule_frequency": "daily",
    "schedule_time": "03:00",
    "schedule_day_of_week": 0,  # Sunday
    "retention_count": 3,
    "schedule_cron_expression": "",
}


def _get_backup_settings():
    """Get all backup settings from CoreSettings grouped JSON."""
    try:
        settings_obj = CoreSettings.objects.get(key="backup_settings")
        return settings_obj.value if isinstance(settings_obj.value, dict) else DEFAULTS.copy()
    except CoreSettings.DoesNotExist:
        return DEFAULTS.copy()


def _update_backup_settings(updates: dict) -> None:
    """Update backup settings in the grouped JSON."""
    obj, created = CoreSettings.objects.get_or_create(
        key="backup_settings",
        defaults={"name": "Backup Settings", "value": DEFAULTS.copy()}
    )
    current = obj.value if isinstance(obj.value, dict) else {}
    current.update(updates)
    obj.value = current
    obj.save()


def get_schedule_settings() -> dict:
    """Get all backup schedule settings."""
    settings = _get_backup_settings()
    return {
        "enabled": bool(settings.get("schedule_enabled", DEFAULTS["schedule_enabled"])),
        "frequency": str(settings.get("schedule_frequency", DEFAULTS["schedule_frequency"])),
        "time": str(settings.get("schedule_time", DEFAULTS["schedule_time"])),
        "day_of_week": int(settings.get("schedule_day_of_week", DEFAULTS["schedule_day_of_week"])),
        "retention_count": int(settings.get("retention_count", DEFAULTS["retention_count"])),
        "cron_expression": str(settings.get("schedule_cron_expression", DEFAULTS["schedule_cron_expression"])),
    }


def update_schedule_settings(data: dict) -> dict:
    """Update backup schedule settings and sync the PeriodicTask."""
    # Validate
    if "frequency" in data and data["frequency"] not in ("daily", "weekly"):
        raise ValueError("frequency must be 'daily' or 'weekly'")

    if "time" in data:
        try:
            hour, minute = data["time"].split(":")
            int(hour)
            int(minute)
        except (ValueError, AttributeError):
            raise ValueError("time must be in HH:MM format")

    if "day_of_week" in data:
        day = int(data["day_of_week"])
        if day < 0 or day > 6:
            raise ValueError("day_of_week must be 0-6 (Sunday-Saturday)")

    if "retention_count" in data:
        count = int(data["retention_count"])
        if count < 0:
            raise ValueError("retention_count must be >= 0")

    # Update settings with proper key names
    updates = {}
    if "enabled" in data:
        updates["schedule_enabled"] = bool(data["enabled"])
    if "frequency" in data:
        updates["schedule_frequency"] = str(data["frequency"])
    if "time" in data:
        updates["schedule_time"] = str(data["time"])
    if "day_of_week" in data:
        updates["schedule_day_of_week"] = int(data["day_of_week"])
    if "retention_count" in data:
        updates["retention_count"] = int(data["retention_count"])
    if "cron_expression" in data:
        updates["schedule_cron_expression"] = str(data["cron_expression"])

    _update_backup_settings(updates)

    # Sync the periodic task
    _sync_periodic_task()

    return get_schedule_settings()


def _sync_periodic_task() -> None:
    """Create, update, or delete the scheduled backup task based on settings."""
    settings = get_schedule_settings()

    if not settings["enabled"]:
        # Delete the task if it exists
        task = PeriodicTask.objects.filter(name=BACKUP_SCHEDULE_TASK_NAME).first()
        if task:
            old_crontab = task.crontab
            task.delete()
            _cleanup_orphaned_crontab(old_crontab)
            logger.info("Backup schedule disabled, removed periodic task")
        return

    # Get old crontab before creating new one
    old_crontab = None
    try:
        old_task = PeriodicTask.objects.get(name=BACKUP_SCHEDULE_TASK_NAME)
        old_crontab = old_task.crontab
    except PeriodicTask.DoesNotExist:
        pass

    # Check if using cron expression (advanced mode)
    if settings["cron_expression"]:
        # Parse cron expression: "minute hour day month weekday"
        try:
            parts = settings["cron_expression"].split()
            if len(parts) != 5:
                raise ValueError("Cron expression must have 5 parts: minute hour day month weekday")

            minute, hour, day_of_month, month_of_year, day_of_week = parts

            crontab, _ = CrontabSchedule.objects.get_or_create(
                minute=minute,
                hour=hour,
                day_of_week=day_of_week,
                day_of_month=day_of_month,
                month_of_year=month_of_year,
                timezone=CoreSettings.get_system_time_zone(),
            )
        except Exception as e:
            logger.error(f"Invalid cron expression '{settings['cron_expression']}': {e}")
            raise ValueError(f"Invalid cron expression: {e}")
    else:
        # Use simple frequency-based scheduling
        # Parse time
        hour, minute = settings["time"].split(":")

        # Build crontab based on frequency
        system_tz = CoreSettings.get_system_time_zone()
        if settings["frequency"] == "daily":
            crontab, _ = CrontabSchedule.objects.get_or_create(
                minute=minute,
                hour=hour,
                day_of_week="*",
                day_of_month="*",
                month_of_year="*",
                timezone=system_tz,
            )
        else:  # weekly
            crontab, _ = CrontabSchedule.objects.get_or_create(
                minute=minute,
                hour=hour,
                day_of_week=str(settings["day_of_week"]),
                day_of_month="*",
                month_of_year="*",
                timezone=system_tz,
            )

    # Create or update the periodic task
    task, created = PeriodicTask.objects.update_or_create(
        name=BACKUP_SCHEDULE_TASK_NAME,
        defaults={
            "task": "apps.backups.tasks.scheduled_backup_task",
            "crontab": crontab,
            "enabled": True,
            "kwargs": json.dumps({"retention_count": settings["retention_count"]}),
        },
    )

    # Clean up old crontab if it changed and is orphaned
    if old_crontab and old_crontab.id != crontab.id:
        _cleanup_orphaned_crontab(old_crontab)

    action = "Created" if created else "Updated"
    logger.info(f"{action} backup schedule: {settings['frequency']} at {settings['time']}")


def _cleanup_orphaned_crontab(crontab_schedule):
    """Delete old CrontabSchedule if no other tasks are using it."""
    if crontab_schedule is None:
        return

    # Check if any other tasks are using this crontab
    if PeriodicTask.objects.filter(crontab=crontab_schedule).exists():
        logger.debug(f"CrontabSchedule {crontab_schedule.id} still in use, not deleting")
        return

    logger.debug(f"Cleaning up orphaned CrontabSchedule: {crontab_schedule.id}")
    crontab_schedule.delete()
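As a quick illustration of the two scheduling modes above, the sketch below restates the mapping that update_schedule_settings and _sync_periodic_task perform. It assumes a Django shell with this app and django_celery_beat installed; it introduces no behavior beyond what the module already does.

# Simple mode: "daily at 03:00" becomes CrontabSchedule(minute="00", hour="03",
# day_of_week="*", day_of_month="*", month_of_year="*") in the system timezone.
from apps.backups.scheduler import update_schedule_settings, get_schedule_settings

update_schedule_settings({"enabled": True, "frequency": "daily", "time": "03:00"})

# Advanced mode: a 5-field cron expression is split positionally into
# minute, hour, day_of_month, month_of_year, day_of_week.
# "30 2 * * 1" -> 02:30 every Monday; it takes precedence over frequency/time.
update_schedule_settings({"enabled": True, "cron_expression": "30 2 * * 1"})

print(get_schedule_settings())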
350  apps/backups/services.py  Normal file
@@ -0,0 +1,350 @@
import datetime
import json
import os
import shutil
import subprocess
import tempfile
from pathlib import Path
from zipfile import ZipFile, ZIP_DEFLATED
import logging
import pytz

from django.conf import settings
from core.models import CoreSettings

logger = logging.getLogger(__name__)


def get_backup_dir() -> Path:
    """Get the backup directory, creating it if necessary."""
    backup_dir = Path(settings.BACKUP_ROOT)
    backup_dir.mkdir(parents=True, exist_ok=True)
    return backup_dir


def _is_postgresql() -> bool:
    """Check if we're using PostgreSQL."""
    return settings.DATABASES["default"]["ENGINE"] == "django.db.backends.postgresql"


def _get_pg_env() -> dict:
    """Get environment variables for PostgreSQL commands."""
    db_config = settings.DATABASES["default"]
    env = os.environ.copy()
    env["PGPASSWORD"] = db_config.get("PASSWORD", "")
    return env


def _get_pg_args() -> list[str]:
    """Get common PostgreSQL command arguments."""
    db_config = settings.DATABASES["default"]
    return [
        "-h", db_config.get("HOST", "localhost"),
        "-p", str(db_config.get("PORT", 5432)),
        "-U", db_config.get("USER", "postgres"),
        "-d", db_config.get("NAME", "dispatcharr"),
    ]


def _dump_postgresql(output_file: Path) -> None:
    """Dump PostgreSQL database using pg_dump."""
    logger.info("Dumping PostgreSQL database with pg_dump...")

    cmd = [
        "pg_dump",
        *_get_pg_args(),
        "-Fc",  # Custom format for pg_restore
        "-v",  # Verbose
        "-f", str(output_file),
    ]

    result = subprocess.run(
        cmd,
        env=_get_pg_env(),
        capture_output=True,
        text=True,
    )

    if result.returncode != 0:
        logger.error(f"pg_dump failed: {result.stderr}")
        raise RuntimeError(f"pg_dump failed: {result.stderr}")

    logger.debug(f"pg_dump output: {result.stderr}")


def _clean_postgresql_schema() -> None:
    """Drop and recreate the public schema to ensure a completely clean restore."""
    logger.info("[PG_CLEAN] Dropping and recreating public schema...")

    # Commands to drop and recreate schema
    sql_commands = "DROP SCHEMA IF EXISTS public CASCADE; CREATE SCHEMA public; GRANT ALL ON SCHEMA public TO public;"

    cmd = [
        "psql",
        *_get_pg_args(),
        "-c", sql_commands,
    ]

    result = subprocess.run(
        cmd,
        env=_get_pg_env(),
        capture_output=True,
        text=True,
    )

    if result.returncode != 0:
        logger.error(f"[PG_CLEAN] Failed to clean schema: {result.stderr}")
        raise RuntimeError(f"Failed to clean PostgreSQL schema: {result.stderr}")

    logger.info("[PG_CLEAN] Schema cleaned successfully")


def _restore_postgresql(dump_file: Path) -> None:
    """Restore PostgreSQL database using pg_restore."""
    logger.info("[PG_RESTORE] Starting pg_restore...")
    logger.info(f"[PG_RESTORE] Dump file: {dump_file}")

    # Drop and recreate schema to ensure a completely clean restore
    _clean_postgresql_schema()

    pg_args = _get_pg_args()
    logger.info(f"[PG_RESTORE] Connection args: {pg_args}")

    cmd = [
        "pg_restore",
        "--no-owner",  # Skip ownership commands (we already created schema)
        *pg_args,
        "-v",  # Verbose
        str(dump_file),
    ]

    logger.info(f"[PG_RESTORE] Running command: {' '.join(cmd)}")

    result = subprocess.run(
        cmd,
        env=_get_pg_env(),
        capture_output=True,
        text=True,
    )

    logger.info(f"[PG_RESTORE] Return code: {result.returncode}")

    # pg_restore may return non-zero even on partial success
    # Check for actual errors vs warnings
    if result.returncode != 0:
        # Some errors during restore are expected (e.g., "does not exist" when cleaning)
        # Only fail on critical errors
        stderr = result.stderr.lower()
        if "fatal" in stderr or "could not connect" in stderr:
            logger.error(f"[PG_RESTORE] Failed critically: {result.stderr}")
            raise RuntimeError(f"pg_restore failed: {result.stderr}")
        else:
            logger.warning(f"[PG_RESTORE] Completed with warnings: {result.stderr[:500]}...")

    logger.info("[PG_RESTORE] Completed successfully")


def _dump_sqlite(output_file: Path) -> None:
    """Dump SQLite database using sqlite3 .backup command."""
    logger.info("Dumping SQLite database with sqlite3 .backup...")
    db_path = Path(settings.DATABASES["default"]["NAME"])

    if not db_path.exists():
        raise FileNotFoundError(f"SQLite database not found: {db_path}")

    # Use sqlite3 .backup command via stdin for reliable execution
    result = subprocess.run(
        ["sqlite3", str(db_path)],
        input=f".backup '{output_file}'\n",
        capture_output=True,
        text=True,
    )

    if result.returncode != 0:
        logger.error(f"sqlite3 backup failed: {result.stderr}")
        raise RuntimeError(f"sqlite3 backup failed: {result.stderr}")

    # Verify the backup file was created
    if not output_file.exists():
        raise RuntimeError("sqlite3 backup failed: output file not created")

    logger.info(f"sqlite3 backup completed successfully: {output_file}")


def _restore_sqlite(dump_file: Path) -> None:
    """Restore SQLite database by replacing the database file."""
    logger.info("Restoring SQLite database...")
    db_path = Path(settings.DATABASES["default"]["NAME"])
    backup_current = None

    # Backup current database before overwriting
    if db_path.exists():
        backup_current = db_path.with_suffix(".db.bak")
        shutil.copy2(db_path, backup_current)
        logger.info(f"Backed up current database to {backup_current}")

    # Ensure parent directory exists
    db_path.parent.mkdir(parents=True, exist_ok=True)

    # The backup file from _dump_sqlite is a complete SQLite database file
    # We can simply copy it over the existing database
    shutil.copy2(dump_file, db_path)

    # Verify the restore worked by checking if sqlite3 can read it
    result = subprocess.run(
        ["sqlite3", str(db_path)],
        input=".tables\n",
        capture_output=True,
        text=True,
    )

    if result.returncode != 0:
        logger.error(f"sqlite3 verification failed: {result.stderr}")
        # Try to restore from backup
        if backup_current and backup_current.exists():
            shutil.copy2(backup_current, db_path)
            logger.info("Restored original database from backup")
        raise RuntimeError(f"sqlite3 restore verification failed: {result.stderr}")

    logger.info("sqlite3 restore completed successfully")


def create_backup() -> Path:
    """
    Create a backup archive containing database dump and data directories.
    Returns the path to the created backup file.
    """
    backup_dir = get_backup_dir()

    # Use system timezone for filename (user-friendly), but keep internal timestamps as UTC
    system_tz_name = CoreSettings.get_system_time_zone()
    try:
        system_tz = pytz.timezone(system_tz_name)
        now_local = datetime.datetime.now(datetime.UTC).astimezone(system_tz)
        timestamp = now_local.strftime("%Y.%m.%d.%H.%M.%S")
    except Exception as e:
        logger.warning(f"Failed to use system timezone {system_tz_name}: {e}, falling back to UTC")
        timestamp = datetime.datetime.now(datetime.UTC).strftime("%Y.%m.%d.%H.%M.%S")

    backup_name = f"dispatcharr-backup-{timestamp}.zip"
    backup_file = backup_dir / backup_name

    logger.info(f"Creating backup: {backup_name}")

    with tempfile.TemporaryDirectory(prefix="dispatcharr-backup-") as temp_dir:
        temp_path = Path(temp_dir)

        # Determine database type and dump accordingly
        if _is_postgresql():
            db_dump_file = temp_path / "database.dump"
            _dump_postgresql(db_dump_file)
            db_type = "postgresql"
        else:
            db_dump_file = temp_path / "database.sqlite3"
            _dump_sqlite(db_dump_file)
            db_type = "sqlite"

        # Create ZIP archive with compression and ZIP64 support for large files
        with ZipFile(backup_file, "w", compression=ZIP_DEFLATED, allowZip64=True) as zip_file:
            # Add database dump
            zip_file.write(db_dump_file, db_dump_file.name)

            # Add metadata
            metadata = {
                "format": "dispatcharr-backup",
                "version": 2,
                "database_type": db_type,
                "database_file": db_dump_file.name,
                "created_at": datetime.datetime.now(datetime.UTC).isoformat(),
            }
            zip_file.writestr("metadata.json", json.dumps(metadata, indent=2))

    logger.info(f"Backup created successfully: {backup_file}")
    return backup_file


def restore_backup(backup_file: Path) -> None:
    """
    Restore from a backup archive.
    WARNING: This will overwrite the database!
    """
    if not backup_file.exists():
        raise FileNotFoundError(f"Backup file not found: {backup_file}")

    logger.info(f"Restoring from backup: {backup_file}")

    with tempfile.TemporaryDirectory(prefix="dispatcharr-restore-") as temp_dir:
        temp_path = Path(temp_dir)

        # Extract backup
        logger.debug("Extracting backup archive...")
        with ZipFile(backup_file, "r") as zip_file:
            zip_file.extractall(temp_path)

        # Read metadata
        metadata_file = temp_path / "metadata.json"
        if not metadata_file.exists():
            raise ValueError("Invalid backup: missing metadata.json")

        with open(metadata_file) as f:
            metadata = json.load(f)

        # Restore database
        _restore_database(temp_path, metadata)

    logger.info("Restore completed successfully")


def _restore_database(temp_path: Path, metadata: dict) -> None:
    """Restore database from backup."""
    db_type = metadata.get("database_type", "postgresql")
    db_file = metadata.get("database_file", "database.dump")
    dump_file = temp_path / db_file

    if not dump_file.exists():
        raise ValueError(f"Invalid backup: missing {db_file}")

    current_db_type = "postgresql" if _is_postgresql() else "sqlite"

    if db_type != current_db_type:
        raise ValueError(
            f"Database type mismatch: backup is {db_type}, "
            f"but current database is {current_db_type}"
        )

    if db_type == "postgresql":
        _restore_postgresql(dump_file)
    else:
        _restore_sqlite(dump_file)


def list_backups() -> list[dict]:
    """List all available backup files with metadata."""
    backup_dir = get_backup_dir()
    backups = []

    for backup_file in sorted(backup_dir.glob("dispatcharr-backup-*.zip"), reverse=True):
        # Use UTC timezone so frontend can convert to user's local time
        created_time = datetime.datetime.fromtimestamp(backup_file.stat().st_mtime, datetime.UTC)
        backups.append({
            "name": backup_file.name,
            "size": backup_file.stat().st_size,
            "created": created_time.isoformat(),
        })

    return backups


def delete_backup(filename: str) -> None:
    """Delete a backup file."""
    backup_dir = get_backup_dir()
    backup_file = backup_dir / filename

    if not backup_file.exists():
        raise FileNotFoundError(f"Backup file not found: {filename}")

    if not backup_file.is_file():
        raise ValueError(f"Invalid backup file: {filename}")

    backup_file.unlink()
    logger.info(f"Deleted backup: {filename}")
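A sketch of driving this service layer directly from a Django shell (python manage.py shell). The file names printed are whatever the running install produces; nothing here goes beyond the functions defined above.

from apps.backups import services

archive = services.create_backup()          # dumps the DB and zips it with metadata.json
print(archive.name)                         # e.g. dispatcharr-backup-2026.01.09.03.00.00.zip

for entry in services.list_backups():       # newest first
    print(entry["name"], entry["size"], entry["created"])

# Restoring overwrites the current database, as the docstring warns.
services.restore_backup(archive)

services.delete_backup(archive.name)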
106  apps/backups/tasks.py  Normal file
@@ -0,0 +1,106 @@
import logging
import traceback
from celery import shared_task

from . import services

logger = logging.getLogger(__name__)


def _cleanup_old_backups(retention_count: int) -> int:
    """Delete old backups, keeping only the most recent N. Returns count deleted."""
    if retention_count <= 0:
        return 0

    backups = services.list_backups()
    if len(backups) <= retention_count:
        return 0

    # Backups are sorted newest first, so delete from the end
    to_delete = backups[retention_count:]
    deleted = 0

    for backup in to_delete:
        try:
            services.delete_backup(backup["name"])
            deleted += 1
            logger.info(f"[CLEANUP] Deleted old backup: {backup['name']}")
        except Exception as e:
            logger.error(f"[CLEANUP] Failed to delete {backup['name']}: {e}")

    return deleted


@shared_task(bind=True)
def create_backup_task(self):
    """Celery task to create a backup asynchronously."""
    try:
        logger.info(f"[BACKUP] Starting backup task {self.request.id}")
        backup_file = services.create_backup()
        logger.info(f"[BACKUP] Task {self.request.id} completed: {backup_file.name}")
        return {
            "status": "completed",
            "filename": backup_file.name,
            "size": backup_file.stat().st_size,
        }
    except Exception as e:
        logger.error(f"[BACKUP] Task {self.request.id} failed: {str(e)}")
        logger.error(f"[BACKUP] Traceback: {traceback.format_exc()}")
        return {
            "status": "failed",
            "error": str(e),
        }


@shared_task(bind=True)
def restore_backup_task(self, filename: str):
    """Celery task to restore a backup asynchronously."""
    try:
        logger.info(f"[RESTORE] Starting restore task {self.request.id} for {filename}")
        backup_dir = services.get_backup_dir()
        backup_file = backup_dir / filename
        logger.info(f"[RESTORE] Backup file path: {backup_file}")
        services.restore_backup(backup_file)
        logger.info(f"[RESTORE] Task {self.request.id} completed successfully")
        return {
            "status": "completed",
            "filename": filename,
        }
    except Exception as e:
        logger.error(f"[RESTORE] Task {self.request.id} failed: {str(e)}")
        logger.error(f"[RESTORE] Traceback: {traceback.format_exc()}")
        return {
            "status": "failed",
            "error": str(e),
        }


@shared_task(bind=True)
def scheduled_backup_task(self, retention_count: int = 0):
    """Celery task for scheduled backups with optional retention cleanup."""
    try:
        logger.info(f"[SCHEDULED] Starting scheduled backup task {self.request.id}")

        # Create backup
        backup_file = services.create_backup()
        logger.info(f"[SCHEDULED] Backup created: {backup_file.name}")

        # Cleanup old backups if retention is set
        deleted = 0
        if retention_count > 0:
            deleted = _cleanup_old_backups(retention_count)
            logger.info(f"[SCHEDULED] Cleanup complete, deleted {deleted} old backup(s)")

        return {
            "status": "completed",
            "filename": backup_file.name,
            "size": backup_file.stat().st_size,
            "deleted_count": deleted,
        }
    except Exception as e:
        logger.error(f"[SCHEDULED] Task {self.request.id} failed: {str(e)}")
        logger.error(f"[SCHEDULED] Traceback: {traceback.format_exc()}")
        return {
            "status": "failed",
            "error": str(e),
        }
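A sketch of queueing these tasks from application code or a shell. It assumes a Celery worker for this project is running and, for the blocking .get() call, that a result backend is configured; note the tasks report failures through the returned dict rather than by raising.

from apps.backups.tasks import create_backup_task, restore_backup_task, scheduled_backup_task

async_result = create_backup_task.delay()
outcome = async_result.get(timeout=600)     # {"status": "completed", "filename": ..., "size": ...}

if outcome["status"] == "completed":
    # Re-running the scheduled task by hand with retention enabled
    scheduled_backup_task.delay(retention_count=3)
    # Restoring by filename (the file must already exist in the backup directory)
    restore_backup_task.delay(outcome["filename"])
else:
    print("backup failed:", outcome["error"])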
1163  apps/backups/tests.py  Normal file
(File diff suppressed because it is too large.)
@@ -47,7 +47,7 @@ urlpatterns = [
     path('series-rules/', SeriesRulesAPIView.as_view(), name='series_rules'),
     path('series-rules/evaluate/', EvaluateSeriesRulesAPIView.as_view(), name='evaluate_series_rules'),
     path('series-rules/bulk-remove/', BulkRemoveSeriesRecordingsAPIView.as_view(), name='bulk_remove_series_recordings'),
-    path('series-rules/<str:tvg_id>/', DeleteSeriesRuleAPIView.as_view(), name='delete_series_rule'),
+    path('series-rules/<path:tvg_id>/', DeleteSeriesRuleAPIView.as_view(), name='delete_series_rule'),
     path('recordings/bulk-delete-upcoming/', BulkDeleteUpcomingRecordingsAPIView.as_view(), name='bulk_delete_upcoming_recordings'),
     path('dvr/comskip-config/', ComskipConfigAPIView.as_view(), name='comskip_config'),
 ]
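The <str:...> to <path:...> switch matters because some tvg_id values contain slashes or percent-encoded characters that the narrower converter would refuse to match; the values below are invented, and the exact routing behavior for encoded slashes depends on the server setup, so treat this only as a sketch of the intent (paired with the unquote() added to DeleteSeriesRuleAPIView.delete()).

from urllib.parse import quote, unquote

tvg_id = "us/Some.Channel.HD"               # invented id containing a slash
encoded = quote(tvg_id, safe="")            # "us%2FSome.Channel.HD"

# <str:tvg_id> stops at "/", so the raw form cannot reach the view;
# <path:tvg_id> accepts it, and unquote() in the view normalizes clients
# that send the encoded form to the same rule key.
assert unquote(encoded) == tvg_id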
|
|
|
|||
|
|
@ -8,7 +8,10 @@ from drf_yasg.utils import swagger_auto_schema
|
|||
from drf_yasg import openapi
|
||||
from django.shortcuts import get_object_or_404, get_list_or_404
|
||||
from django.db import transaction
|
||||
import os, json, requests, logging
|
||||
from django.db.models import Q
|
||||
import os, json, requests, logging, mimetypes
|
||||
from django.utils.http import http_date
|
||||
from urllib.parse import unquote
|
||||
from apps.accounts.permissions import (
|
||||
Authenticated,
|
||||
IsAdmin,
|
||||
|
|
@ -124,10 +127,12 @@ class StreamViewSet(viewsets.ModelViewSet):
|
|||
filter_backends = [DjangoFilterBackend, SearchFilter, OrderingFilter]
|
||||
filterset_class = StreamFilter
|
||||
search_fields = ["name", "channel_group__name"]
|
||||
ordering_fields = ["name", "channel_group__name"]
|
||||
ordering_fields = ["name", "channel_group__name", "m3u_account__name"]
|
||||
ordering = ["-name"]
|
||||
|
||||
def get_permissions(self):
|
||||
if self.action == "duplicate":
|
||||
return [IsAdmin()]
|
||||
try:
|
||||
return [perm() for perm in permission_classes_by_action[self.action]]
|
||||
except KeyError:
|
||||
|
|
@ -234,12 +239,8 @@ class ChannelGroupViewSet(viewsets.ModelViewSet):
|
|||
return [Authenticated()]
|
||||
|
||||
def get_queryset(self):
|
||||
"""Add annotation for association counts"""
|
||||
from django.db.models import Count
|
||||
return ChannelGroup.objects.annotate(
|
||||
channel_count=Count('channels', distinct=True),
|
||||
m3u_account_count=Count('m3u_accounts', distinct=True)
|
||||
)
|
||||
"""Return channel groups with prefetched relations for efficient counting"""
|
||||
return ChannelGroup.objects.prefetch_related('channels', 'm3u_accounts').all()
|
||||
|
||||
def update(self, request, *args, **kwargs):
|
||||
"""Override update to check M3U associations"""
|
||||
|
|
@ -275,15 +276,20 @@ class ChannelGroupViewSet(viewsets.ModelViewSet):
|
|||
@action(detail=False, methods=["post"], url_path="cleanup")
|
||||
def cleanup_unused_groups(self, request):
|
||||
"""Delete all channel groups with no channels or M3U account associations"""
|
||||
from django.db.models import Count
|
||||
from django.db.models import Q, Exists, OuterRef
|
||||
|
||||
# Find groups with no channels and no M3U account associations using Exists subqueries
|
||||
from .models import Channel, ChannelGroupM3UAccount
|
||||
|
||||
has_channels = Channel.objects.filter(channel_group_id=OuterRef('pk'))
|
||||
has_accounts = ChannelGroupM3UAccount.objects.filter(channel_group_id=OuterRef('pk'))
|
||||
|
||||
# Find groups with no channels and no M3U account associations
|
||||
unused_groups = ChannelGroup.objects.annotate(
|
||||
channel_count=Count('channels', distinct=True),
|
||||
m3u_account_count=Count('m3u_accounts', distinct=True)
|
||||
has_channels=Exists(has_channels),
|
||||
has_accounts=Exists(has_accounts)
|
||||
).filter(
|
||||
channel_count=0,
|
||||
m3u_account_count=0
|
||||
has_channels=False,
|
||||
has_accounts=False
|
||||
)
|
||||
|
||||
deleted_count = unused_groups.count()
|
||||
|
|
@ -384,6 +390,72 @@ class ChannelViewSet(viewsets.ModelViewSet):
|
|||
ordering_fields = ["channel_number", "name", "channel_group__name"]
|
||||
ordering = ["-channel_number"]
|
||||
|
||||
def create(self, request, *args, **kwargs):
|
||||
"""Override create to handle channel profile membership"""
|
||||
serializer = self.get_serializer(data=request.data)
|
||||
serializer.is_valid(raise_exception=True)
|
||||
|
||||
with transaction.atomic():
|
||||
channel = serializer.save()
|
||||
|
||||
# Handle channel profile membership
|
||||
# Semantics:
|
||||
# - Omitted (None): add to ALL profiles (backward compatible default)
|
||||
# - Empty array []: add to NO profiles
|
||||
# - Sentinel [0] or 0: add to ALL profiles (explicit)
|
||||
# - [1,2,...]: add to specified profile IDs only
|
||||
channel_profile_ids = request.data.get("channel_profile_ids")
|
||||
if channel_profile_ids is not None:
|
||||
# Normalize single ID to array
|
||||
if not isinstance(channel_profile_ids, list):
|
||||
channel_profile_ids = [channel_profile_ids]
|
||||
|
||||
# Determine action based on semantics
|
||||
if channel_profile_ids is None:
|
||||
# Omitted -> add to all profiles (backward compatible)
|
||||
profiles = ChannelProfile.objects.all()
|
||||
ChannelProfileMembership.objects.bulk_create([
|
||||
ChannelProfileMembership(channel_profile=profile, channel=channel, enabled=True)
|
||||
for profile in profiles
|
||||
])
|
||||
elif isinstance(channel_profile_ids, list) and len(channel_profile_ids) == 0:
|
||||
# Empty array -> add to no profiles
|
||||
pass
|
||||
elif isinstance(channel_profile_ids, list) and 0 in channel_profile_ids:
|
||||
# Sentinel 0 -> add to all profiles (explicit)
|
||||
profiles = ChannelProfile.objects.all()
|
||||
ChannelProfileMembership.objects.bulk_create([
|
||||
ChannelProfileMembership(channel_profile=profile, channel=channel, enabled=True)
|
||||
for profile in profiles
|
||||
])
|
||||
else:
|
||||
# Specific profile IDs
|
||||
try:
|
||||
channel_profiles = ChannelProfile.objects.filter(id__in=channel_profile_ids)
|
||||
if len(channel_profiles) != len(channel_profile_ids):
|
||||
missing_ids = set(channel_profile_ids) - set(channel_profiles.values_list('id', flat=True))
|
||||
return Response(
|
||||
{"error": f"Channel profiles with IDs {list(missing_ids)} not found"},
|
||||
status=status.HTTP_400_BAD_REQUEST,
|
||||
)
|
||||
|
||||
ChannelProfileMembership.objects.bulk_create([
|
||||
ChannelProfileMembership(
|
||||
channel_profile=profile,
|
||||
channel=channel,
|
||||
enabled=True
|
||||
)
|
||||
for profile in channel_profiles
|
||||
])
|
||||
except Exception as e:
|
||||
return Response(
|
||||
{"error": f"Error creating profile memberships: {str(e)}"},
|
||||
status=status.HTTP_400_BAD_REQUEST,
|
||||
)
|
||||
|
||||
headers = self.get_success_headers(serializer.data)
|
||||
return Response(serializer.data, status=status.HTTP_201_CREATED, headers=headers)
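The four channel_profile_ids cases documented in the comments above are easiest to see as concrete request bodies. A hedged sketch follows; the endpoint path is assumed and the names and numbers are illustrative only.

# Hypothetical payloads for POST /api/channels/channels/ (path assumed),
# mirroring the semantics handled in create() above.
examples = {
    "all_profiles_default":  {"name": "News 1", "channel_number": 100},                               # key omitted
    "no_profiles":           {"name": "News 2", "channel_number": 101, "channel_profile_ids": []},    # empty list
    "all_profiles_explicit": {"name": "News 3", "channel_number": 102, "channel_profile_ids": [0]},   # sentinel 0
    "specific_profiles":     {"name": "News 4", "channel_number": 103, "channel_profile_ids": [1, 2]},
    "single_id_normalized":  {"name": "News 5", "channel_number": 104, "channel_profile_ids": 2},     # coerced to [2]
}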
|
||||
|
||||
def get_permissions(self):
|
||||
if self.action in [
|
||||
"edit_bulk",
|
||||
|
|
@ -419,10 +491,41 @@ class ChannelViewSet(viewsets.ModelViewSet):
|
|||
group_names = channel_group.split(",")
|
||||
qs = qs.filter(channel_group__name__in=group_names)
|
||||
|
||||
if self.request.user.user_level < 10:
|
||||
qs = qs.filter(user_level__lte=self.request.user.user_level)
|
||||
filters = {}
|
||||
q_filters = Q()
|
||||
|
||||
return qs
|
||||
channel_profile_id = self.request.query_params.get("channel_profile_id")
|
||||
show_disabled_param = self.request.query_params.get("show_disabled", None)
|
||||
only_streamless = self.request.query_params.get("only_streamless", None)
|
||||
|
||||
if channel_profile_id:
|
||||
try:
|
||||
profile_id_int = int(channel_profile_id)
|
||||
|
||||
if show_disabled_param is None:
|
||||
# Show only enabled channels: channels that have a membership
|
||||
# record for this profile with enabled=True
|
||||
# Default is DISABLED (channels without membership are hidden)
|
||||
filters["channelprofilemembership__channel_profile_id"] = profile_id_int
|
||||
filters["channelprofilemembership__enabled"] = True
|
||||
# If show_disabled is True, show all channels (no filtering needed)
|
||||
|
||||
except (ValueError, TypeError):
|
||||
# Ignore invalid profile id values
|
||||
pass
|
||||
|
||||
if only_streamless:
|
||||
q_filters &= Q(streams__isnull=True)
|
||||
|
||||
if self.request.user.user_level < 10:
|
||||
filters["user_level__lte"] = self.request.user.user_level
|
||||
|
||||
if filters:
|
||||
qs = qs.filter(**filters)
|
||||
if q_filters:
|
||||
qs = qs.filter(q_filters)
|
||||
|
||||
return qs.distinct()
|
||||
|
||||
def get_serializer_context(self):
|
||||
context = super().get_serializer_context()
|
||||
|
|
@ -518,11 +621,18 @@ class ChannelViewSet(viewsets.ModelViewSet):
|
|||
# Single bulk_update query instead of individual saves
|
||||
channels_to_update = [channel for channel, _ in validated_updates]
|
||||
if channels_to_update:
|
||||
Channel.objects.bulk_update(
|
||||
channels_to_update,
|
||||
fields=list(validated_updates[0][1].keys()),
|
||||
batch_size=100
|
||||
)
|
||||
# Collect all unique field names from all updates
|
||||
all_fields = set()
|
||||
for _, validated_data in validated_updates:
|
||||
all_fields.update(validated_data.keys())
|
||||
|
||||
# Only call bulk_update if there are fields to update
|
||||
if all_fields:
|
||||
Channel.objects.bulk_update(
|
||||
channels_to_update,
|
||||
fields=list(all_fields),
|
||||
batch_size=100
|
||||
)
|
||||
|
||||
# Return the updated objects (already in memory)
|
||||
serialized_channels = ChannelSerializer(
|
||||
|
|
@ -707,7 +817,7 @@ class ChannelViewSet(viewsets.ModelViewSet):
|
|||
"channel_profile_ids": openapi.Schema(
|
||||
type=openapi.TYPE_ARRAY,
|
||||
items=openapi.Items(type=openapi.TYPE_INTEGER),
|
||||
description="(Optional) Channel profile ID(s) to add the channel to. Can be a single ID or array of IDs. If not provided, channel is added to all profiles."
|
||||
description="(Optional) Channel profile ID(s). Behavior: omitted = add to ALL profiles (default); empty array [] = add to NO profiles; [0] = add to ALL profiles (explicit); [1,2,...] = add only to specified profiles."
|
||||
),
|
||||
},
|
||||
),
|
||||
|
|
@ -800,14 +910,37 @@ class ChannelViewSet(viewsets.ModelViewSet):
|
|||
channel.streams.add(stream)
|
||||
|
||||
# Handle channel profile membership
|
||||
# Semantics:
|
||||
# - Omitted (None): add to ALL profiles (backward compatible default)
|
||||
# - Empty array []: add to NO profiles
|
||||
# - Sentinel [0] or 0: add to ALL profiles (explicit)
|
||||
# - [1,2,...]: add to specified profile IDs only
|
||||
channel_profile_ids = request.data.get("channel_profile_ids")
|
||||
if channel_profile_ids is not None:
|
||||
# Normalize single ID to array
|
||||
if not isinstance(channel_profile_ids, list):
|
||||
channel_profile_ids = [channel_profile_ids]
|
||||
|
||||
if channel_profile_ids:
|
||||
# Add channel only to the specified profiles
|
||||
# Determine action based on semantics
|
||||
if channel_profile_ids is None:
|
||||
# Omitted -> add to all profiles (backward compatible)
|
||||
profiles = ChannelProfile.objects.all()
|
||||
ChannelProfileMembership.objects.bulk_create([
|
||||
ChannelProfileMembership(channel_profile=profile, channel=channel, enabled=True)
|
||||
for profile in profiles
|
||||
])
|
||||
elif isinstance(channel_profile_ids, list) and len(channel_profile_ids) == 0:
|
||||
# Empty array -> add to no profiles
|
||||
pass
|
||||
elif isinstance(channel_profile_ids, list) and 0 in channel_profile_ids:
|
||||
# Sentinel 0 -> add to all profiles (explicit)
|
||||
profiles = ChannelProfile.objects.all()
|
||||
ChannelProfileMembership.objects.bulk_create([
|
||||
ChannelProfileMembership(channel_profile=profile, channel=channel, enabled=True)
|
||||
for profile in profiles
|
||||
])
|
||||
else:
|
||||
# Specific profile IDs
|
||||
try:
|
||||
channel_profiles = ChannelProfile.objects.filter(id__in=channel_profile_ids)
|
||||
if len(channel_profiles) != len(channel_profile_ids):
|
||||
|
|
@ -830,13 +963,6 @@ class ChannelViewSet(viewsets.ModelViewSet):
|
|||
{"error": f"Error creating profile memberships: {str(e)}"},
|
||||
status=status.HTTP_400_BAD_REQUEST,
|
||||
)
|
||||
else:
|
||||
# Default behavior: add to all profiles
|
||||
profiles = ChannelProfile.objects.all()
|
||||
ChannelProfileMembership.objects.bulk_create([
|
||||
ChannelProfileMembership(channel_profile=profile, channel=channel, enabled=True)
|
||||
for profile in profiles
|
||||
])
|
||||
|
||||
# Send WebSocket notification for single channel creation
|
||||
from core.utils import send_websocket_update
|
||||
|
|
@ -869,7 +995,7 @@ class ChannelViewSet(viewsets.ModelViewSet):
|
|||
"channel_profile_ids": openapi.Schema(
|
||||
type=openapi.TYPE_ARRAY,
|
||||
items=openapi.Items(type=openapi.TYPE_INTEGER),
|
||||
description="(Optional) Channel profile ID(s) to add the channels to. If not provided, channels are added to all profiles."
|
||||
description="(Optional) Channel profile ID(s). Behavior: omitted = add to ALL profiles (default); empty array [] = add to NO profiles; [0] = add to ALL profiles (explicit); [1,2,...] = add only to specified profiles."
|
||||
),
|
||||
"starting_channel_number": openapi.Schema(
|
||||
type=openapi.TYPE_INTEGER,
|
||||
|
|
@ -1528,11 +1654,10 @@ class LogoViewSet(viewsets.ModelViewSet):
|
|||
"""Streams the logo file, whether it's local or remote."""
|
||||
logo = self.get_object()
|
||||
logo_url = logo.url
|
||||
|
||||
if logo_url.startswith("/data"): # Local file
|
||||
if not os.path.exists(logo_url):
|
||||
raise Http404("Image not found")
|
||||
|
||||
stat = os.stat(logo_url)
|
||||
# Get proper mime type (first item of the tuple)
|
||||
content_type, _ = mimetypes.guess_type(logo_url)
|
||||
if not content_type:
|
||||
|
|
@ -1542,6 +1667,8 @@ class LogoViewSet(viewsets.ModelViewSet):
|
|||
response = StreamingHttpResponse(
|
||||
open(logo_url, "rb"), content_type=content_type
|
||||
)
|
||||
response["Cache-Control"] = "public, max-age=14400" # Cache in browser for 4 hours
|
||||
response["Last-Modified"] = http_date(stat.st_mtime)
|
||||
response["Content-Disposition"] = 'inline; filename="{}"'.format(
|
||||
os.path.basename(logo_url)
|
||||
)
|
||||
|
|
@ -1581,6 +1708,10 @@ class LogoViewSet(viewsets.ModelViewSet):
|
|||
remote_response.iter_content(chunk_size=8192),
|
||||
content_type=content_type,
|
||||
)
|
||||
if(remote_response.headers.get("Cache-Control")):
|
||||
response["Cache-Control"] = remote_response.headers.get("Cache-Control")
|
||||
if(remote_response.headers.get("Last-Modified")):
|
||||
response["Last-Modified"] = remote_response.headers.get("Last-Modified")
|
||||
response["Content-Disposition"] = 'inline; filename="{}"'.format(
|
||||
os.path.basename(logo_url)
|
||||
)
|
||||
|
|
@ -1612,11 +1743,58 @@ class ChannelProfileViewSet(viewsets.ModelViewSet):
|
|||
return self.request.user.channel_profiles.all()
|
||||
|
||||
def get_permissions(self):
|
||||
if self.action == "duplicate":
|
||||
return [IsAdmin()]
|
||||
try:
|
||||
return [perm() for perm in permission_classes_by_action[self.action]]
|
||||
except KeyError:
|
||||
return [Authenticated()]
|
||||
|
||||
@action(detail=True, methods=["post"], url_path="duplicate", permission_classes=[IsAdmin])
|
||||
def duplicate(self, request, pk=None):
|
||||
requested_name = str(request.data.get("name", "")).strip()
|
||||
|
||||
if not requested_name:
|
||||
return Response(
|
||||
{"detail": "Name is required to duplicate a profile."},
|
||||
status=status.HTTP_400_BAD_REQUEST,
|
||||
)
|
||||
|
||||
if ChannelProfile.objects.filter(name=requested_name).exists():
|
||||
return Response(
|
||||
{"detail": "A channel profile with this name already exists."},
|
||||
status=status.HTTP_400_BAD_REQUEST,
|
||||
)
|
||||
|
||||
source_profile = self.get_object()
|
||||
|
||||
with transaction.atomic():
|
||||
new_profile = ChannelProfile.objects.create(name=requested_name)
|
||||
|
||||
source_memberships = ChannelProfileMembership.objects.filter(
|
||||
channel_profile=source_profile
|
||||
)
|
||||
source_enabled_map = {
|
||||
membership.channel_id: membership.enabled
|
||||
for membership in source_memberships
|
||||
}
|
||||
|
||||
new_memberships = list(
|
||||
ChannelProfileMembership.objects.filter(channel_profile=new_profile)
|
||||
)
|
||||
for membership in new_memberships:
|
||||
membership.enabled = source_enabled_map.get(
|
||||
membership.channel_id, False
|
||||
)
|
||||
|
||||
if new_memberships:
|
||||
ChannelProfileMembership.objects.bulk_update(
|
||||
new_memberships, ["enabled"]
|
||||
)
|
||||
|
||||
serializer = self.get_serializer(new_profile)
|
||||
return Response(serializer.data, status=status.HTTP_201_CREATED)
|
||||
|
||||
|
||||
class GetChannelStreamsAPIView(APIView):
|
||||
def get_permissions(self):
|
||||
|
|
@ -1673,6 +1851,30 @@ class BulkUpdateChannelMembershipAPIView(APIView):
|
|||
except KeyError:
|
||||
return [Authenticated()]
|
||||
|
||||
@swagger_auto_schema(
|
||||
operation_description="Bulk enable or disable channels for a specific profile. Creates membership records if they don't exist.",
|
||||
request_body=BulkChannelProfileMembershipSerializer,
|
||||
responses={
|
||||
200: openapi.Response(
|
||||
description="Channels updated successfully",
|
||||
schema=openapi.Schema(
|
||||
type=openapi.TYPE_OBJECT,
|
||||
properties={
|
||||
"status": openapi.Schema(type=openapi.TYPE_STRING, example="success"),
|
||||
"updated": openapi.Schema(type=openapi.TYPE_INTEGER, description="Number of channels updated"),
|
||||
"created": openapi.Schema(type=openapi.TYPE_INTEGER, description="Number of new memberships created"),
|
||||
"invalid_channels": openapi.Schema(
|
||||
type=openapi.TYPE_ARRAY,
|
||||
items=openapi.Schema(type=openapi.TYPE_INTEGER),
|
||||
description="List of channel IDs that don't exist"
|
||||
),
|
||||
},
|
||||
),
|
||||
),
|
||||
400: "Invalid request data",
|
||||
404: "Profile not found",
|
||||
},
|
||||
)
|
||||
def patch(self, request, profile_id):
|
||||
"""Bulk enable or disable channels for a specific profile"""
|
||||
# Get the channel profile
|
||||
|
|
@ -1685,21 +1887,67 @@ class BulkUpdateChannelMembershipAPIView(APIView):
|
|||
updates = serializer.validated_data["channels"]
|
||||
channel_ids = [entry["channel_id"] for entry in updates]
|
||||
|
||||
memberships = ChannelProfileMembership.objects.filter(
|
||||
# Validate that all channels exist
|
||||
existing_channels = set(
|
||||
Channel.objects.filter(id__in=channel_ids).values_list("id", flat=True)
|
||||
)
|
||||
invalid_channels = [cid for cid in channel_ids if cid not in existing_channels]
|
||||
|
||||
if invalid_channels:
|
||||
return Response(
|
||||
{
|
||||
"error": "Some channels do not exist",
|
||||
"invalid_channels": invalid_channels,
|
||||
},
|
||||
status=status.HTTP_400_BAD_REQUEST,
|
||||
)
|
||||
|
||||
# Get existing memberships
|
||||
existing_memberships = ChannelProfileMembership.objects.filter(
|
||||
channel_profile=channel_profile, channel_id__in=channel_ids
|
||||
)
|
||||
membership_dict = {m.channel_id: m for m in existing_memberships}
|
||||
|
||||
membership_dict = {m.channel.id: m for m in memberships}
|
||||
# Prepare lists for bulk operations
|
||||
memberships_to_update = []
|
||||
memberships_to_create = []
|
||||
|
||||
for entry in updates:
|
||||
channel_id = entry["channel_id"]
|
||||
enabled_status = entry["enabled"]
|
||||
|
||||
if channel_id in membership_dict:
|
||||
# Update existing membership
|
||||
membership_dict[channel_id].enabled = enabled_status
|
||||
memberships_to_update.append(membership_dict[channel_id])
|
||||
else:
|
||||
# Create new membership
|
||||
memberships_to_create.append(
|
||||
ChannelProfileMembership(
|
||||
channel_profile=channel_profile,
|
||||
channel_id=channel_id,
|
||||
enabled=enabled_status,
|
||||
)
|
||||
)
|
||||
|
||||
ChannelProfileMembership.objects.bulk_update(memberships, ["enabled"])
|
||||
# Perform bulk operations
|
||||
with transaction.atomic():
|
||||
if memberships_to_update:
|
||||
ChannelProfileMembership.objects.bulk_update(
|
||||
memberships_to_update, ["enabled"]
|
||||
)
|
||||
if memberships_to_create:
|
||||
ChannelProfileMembership.objects.bulk_create(memberships_to_create)
|
||||
|
||||
return Response({"status": "success"}, status=status.HTTP_200_OK)
|
||||
return Response(
|
||||
{
|
||||
"status": "success",
|
||||
"updated": len(memberships_to_update),
|
||||
"created": len(memberships_to_create),
|
||||
"invalid_channels": [],
|
||||
},
|
||||
status=status.HTTP_200_OK,
|
||||
)
|
||||
|
||||
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
|
||||
|
||||
|
|
@ -1745,7 +1993,7 @@ class RecordingViewSet(viewsets.ModelViewSet):
|
|||
|
||||
def get_permissions(self):
|
||||
# Allow unauthenticated playback of recording files (like other streaming endpoints)
|
||||
if getattr(self, 'action', None) == 'file':
|
||||
if self.action == 'file':
|
||||
return [AllowAny()]
|
||||
try:
|
||||
return [perm() for perm in permission_classes_by_action[self.action]]
|
||||
|
|
@ -2026,7 +2274,7 @@ class DeleteSeriesRuleAPIView(APIView):
|
|||
return [Authenticated()]
|
||||
|
||||
def delete(self, request, tvg_id):
|
||||
tvg_id = str(tvg_id)
|
||||
tvg_id = unquote(str(tvg_id or ""))
|
||||
rules = [r for r in CoreSettings.get_dvr_series_rules() if str(r.get("tvg_id")) != tvg_id]
|
||||
CoreSettings.set_dvr_series_rules(rules)
|
||||
return Response({"success": True, "rules": rules})
|
||||
|
|
|
|||
|
|
@ -0,0 +1,29 @@
|
|||
# Generated by Django 5.2.9 on 2026-01-09 18:19
|
||||
|
||||
import datetime
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('dispatcharr_channels', '0030_alter_stream_url'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name='channelgroupm3uaccount',
|
||||
name='is_stale',
|
||||
field=models.BooleanField(db_index=True, default=False, help_text='Whether this group relationship is stale (not seen in recent refresh, pending deletion)'),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='channelgroupm3uaccount',
|
||||
name='last_seen',
|
||||
field=models.DateTimeField(db_index=True, default=datetime.datetime.now, help_text='Last time this group was seen in the M3U source during a refresh'),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='stream',
|
||||
name='is_stale',
|
||||
field=models.BooleanField(db_index=True, default=False, help_text='Whether this stream is stale (not seen in recent refresh, pending deletion)'),
|
||||
),
|
||||
]
|
||||
|
|
@ -94,6 +94,11 @@ class Stream(models.Model):
|
|||
db_index=True,
|
||||
)
|
||||
last_seen = models.DateTimeField(db_index=True, default=datetime.now)
|
||||
is_stale = models.BooleanField(
|
||||
default=False,
|
||||
db_index=True,
|
||||
help_text="Whether this stream is stale (not seen in recent refresh, pending deletion)"
|
||||
)
|
||||
custom_properties = models.JSONField(default=dict, blank=True, null=True)
|
||||
|
||||
# Stream statistics fields
|
||||
|
|
@ -119,11 +124,11 @@ class Stream(models.Model):
|
|||
return self.name or self.url or f"Stream ID {self.id}"
|
||||
|
||||
@classmethod
|
||||
def generate_hash_key(cls, name, url, tvg_id, keys=None, m3u_id=None):
|
||||
def generate_hash_key(cls, name, url, tvg_id, keys=None, m3u_id=None, group=None):
|
||||
if keys is None:
|
||||
keys = CoreSettings.get_m3u_hash_key().split(",")
|
||||
|
||||
stream_parts = {"name": name, "url": url, "tvg_id": tvg_id, "m3u_id": m3u_id}
|
||||
stream_parts = {"name": name, "url": url, "tvg_id": tvg_id, "m3u_id": m3u_id, "group": group}
|
||||
|
||||
hash_parts = {key: stream_parts[key] for key in keys if key in stream_parts}
|
||||
|
||||
|
|
@ -589,6 +594,16 @@ class ChannelGroupM3UAccount(models.Model):
|
|||
blank=True,
|
||||
help_text='Starting channel number for auto-created channels in this group'
|
||||
)
|
||||
last_seen = models.DateTimeField(
|
||||
default=datetime.now,
|
||||
db_index=True,
|
||||
help_text='Last time this group was seen in the M3U source during a refresh'
|
||||
)
|
||||
is_stale = models.BooleanField(
|
||||
default=False,
|
||||
db_index=True,
|
||||
help_text='Whether this group relationship is stale (not seen in recent refresh, pending deletion)'
|
||||
)
|
||||
|
||||
class Meta:
|
||||
unique_together = ("channel_group", "m3u_account")
|
||||
|
|
|
|||
|
|
@ -119,6 +119,7 @@ class StreamSerializer(serializers.ModelSerializer):
|
|||
"current_viewers",
|
||||
"updated_at",
|
||||
"last_seen",
|
||||
"is_stale",
|
||||
"stream_profile_id",
|
||||
"is_custom",
|
||||
"channel_group",
|
||||
|
|
@ -155,7 +156,7 @@ class ChannelGroupM3UAccountSerializer(serializers.ModelSerializer):
|
|||
|
||||
class Meta:
|
||||
model = ChannelGroupM3UAccount
|
||||
fields = ["m3u_accounts", "channel_group", "enabled", "auto_channel_sync", "auto_sync_channel_start", "custom_properties"]
|
||||
fields = ["m3u_accounts", "channel_group", "enabled", "auto_channel_sync", "auto_sync_channel_start", "custom_properties", "is_stale", "last_seen"]
|
||||
|
||||
def to_representation(self, instance):
|
||||
data = super().to_representation(instance)
|
||||
|
|
@ -179,8 +180,8 @@ class ChannelGroupM3UAccountSerializer(serializers.ModelSerializer):
|
|||
# Channel Group
|
||||
#
|
||||
class ChannelGroupSerializer(serializers.ModelSerializer):
|
||||
channel_count = serializers.IntegerField(read_only=True)
|
||||
m3u_account_count = serializers.IntegerField(read_only=True)
|
||||
channel_count = serializers.SerializerMethodField()
|
||||
m3u_account_count = serializers.SerializerMethodField()
|
||||
m3u_accounts = ChannelGroupM3UAccountSerializer(
|
||||
many=True,
|
||||
read_only=True
|
||||
|
|
@ -190,6 +191,14 @@ class ChannelGroupSerializer(serializers.ModelSerializer):
|
|||
model = ChannelGroup
|
||||
fields = ["id", "name", "channel_count", "m3u_account_count", "m3u_accounts"]
|
||||
|
||||
def get_channel_count(self, obj):
|
||||
"""Get count of channels in this group"""
|
||||
return obj.channels.count()
|
||||
|
||||
def get_m3u_account_count(self, obj):
|
||||
"""Get count of M3U accounts associated with this group"""
|
||||
return obj.m3u_accounts.count()
|
||||
|
||||
|
||||
class ChannelProfileSerializer(serializers.ModelSerializer):
|
||||
channels = serializers.SerializerMethodField()
|
||||
|
|
@ -294,8 +303,17 @@ class ChannelSerializer(serializers.ModelSerializer):
|
|||
|
||||
if include_streams:
|
||||
self.fields["streams"] = serializers.SerializerMethodField()
|
||||
|
||||
return super().to_representation(instance)
|
||||
return super().to_representation(instance)
|
||||
else:
|
||||
# Fix: For PATCH/PUT responses, ensure streams are ordered
|
||||
representation = super().to_representation(instance)
|
||||
if "streams" in representation:
|
||||
representation["streams"] = list(
|
||||
instance.streams.all()
|
||||
.order_by("channelstream__order")
|
||||
.values_list("id", flat=True)
|
||||
)
|
||||
return representation
|
||||
|
||||
def get_logo(self, obj):
|
||||
return LogoSerializer(obj.logo).data
|
||||
|
|
|
|||
|
|
@ -295,7 +295,11 @@ def match_channels_to_epg(channels_data, epg_data, region_code=None, use_ml=True
|
|||
if score > 50: # Only show decent matches
|
||||
logger.debug(f" EPG '{row['name']}' (norm: '{row['norm_name']}') => score: {score} (base: {base_score}, bonus: {bonus})")
|
||||
|
||||
if score > best_score:
|
||||
# When scores are equal, prefer higher priority EPG source
|
||||
row_priority = row.get('epg_source_priority', 0)
|
||||
best_priority = best_epg.get('epg_source_priority', 0) if best_epg else -1
|
||||
|
||||
if score > best_score or (score == best_score and row_priority > best_priority):
|
||||
best_score = score
|
||||
best_epg = row
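A tiny worked example of the tie-break introduced here, with invented scores and priorities: two EPG rows score 82 for the same channel, the one whose source has the higher priority wins, and a strictly higher score still always wins regardless of priority.

# Invented data exercising the rule:
# score > best_score OR (score == best_score AND row_priority > best_priority)
candidates = [
    {"name": "CNN",      "score": 82, "epg_source_priority": 1},
    {"name": "CNN HD",   "score": 82, "epg_source_priority": 5},  # same score, higher priority
    {"name": "CNN Intl", "score": 80, "epg_source_priority": 9},  # lower score never wins
]

best_score, best_epg = 0, None
for row in candidates:
    row_priority = row.get("epg_source_priority", 0)
    best_priority = best_epg.get("epg_source_priority", 0) if best_epg else -1
    if row["score"] > best_score or (row["score"] == best_score and row_priority > best_priority):
        best_score, best_epg = row["score"], row

assert best_epg["name"] == "CNN HD"   # priority broke the 82-82 tie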
|
||||
|
||||
|
|
@ -471,9 +475,9 @@ def match_epg_channels():
|
|||
"norm_chan": normalize_name(channel.name) # Always use channel name for fuzzy matching!
|
||||
})
|
||||
|
||||
# Get all EPG data
|
||||
# Get all EPG data from active sources, ordered by source priority (highest first) so we prefer higher priority matches
|
||||
epg_data = []
|
||||
for epg in EPGData.objects.all():
|
||||
for epg in EPGData.objects.select_related('epg_source').filter(epg_source__is_active=True):
|
||||
normalized_tvg_id = epg.tvg_id.strip().lower() if epg.tvg_id else ""
|
||||
epg_data.append({
|
||||
'id': epg.id,
|
||||
|
|
@ -482,9 +486,13 @@ def match_epg_channels():
|
|||
'name': epg.name,
|
||||
'norm_name': normalize_name(epg.name),
|
||||
'epg_source_id': epg.epg_source.id if epg.epg_source else None,
|
||||
'epg_source_priority': epg.epg_source.priority if epg.epg_source else 0,
|
||||
})
|
||||
|
||||
logger.info(f"Processing {len(channels_data)} channels against {len(epg_data)} EPG entries")
|
||||
# Sort EPG data by source priority (highest first) so we prefer higher priority matches
|
||||
epg_data.sort(key=lambda x: x['epg_source_priority'], reverse=True)
|
||||
|
||||
logger.info(f"Processing {len(channels_data)} channels against {len(epg_data)} EPG entries (from active sources only)")
|
||||
|
||||
# Run EPG matching with progress updates - automatically uses conservative thresholds for bulk operations
|
||||
result = match_channels_to_epg(channels_data, epg_data, region_code, use_ml=True, send_progress=True)
|
||||
|
|
@ -618,9 +626,9 @@ def match_selected_channels_epg(channel_ids):
|
|||
"norm_chan": normalize_name(channel.name)
|
||||
})
|
||||
|
||||
# Get all EPG data
|
||||
# Get all EPG data from active sources, ordered by source priority (highest first) so we prefer higher priority matches
|
||||
epg_data = []
|
||||
for epg in EPGData.objects.all():
|
||||
for epg in EPGData.objects.select_related('epg_source').filter(epg_source__is_active=True):
|
||||
normalized_tvg_id = epg.tvg_id.strip().lower() if epg.tvg_id else ""
|
||||
epg_data.append({
|
||||
'id': epg.id,
|
||||
|
|
@ -629,9 +637,13 @@ def match_selected_channels_epg(channel_ids):
|
|||
'name': epg.name,
|
||||
'norm_name': normalize_name(epg.name),
|
||||
'epg_source_id': epg.epg_source.id if epg.epg_source else None,
|
||||
'epg_source_priority': epg.epg_source.priority if epg.epg_source else 0,
|
||||
})
|
||||
|
||||
logger.info(f"Processing {len(channels_data)} selected channels against {len(epg_data)} EPG entries")
|
||||
# Sort EPG data by source priority (highest first) so we prefer higher priority matches
|
||||
epg_data.sort(key=lambda x: x['epg_source_priority'], reverse=True)
|
||||
|
||||
logger.info(f"Processing {len(channels_data)} selected channels against {len(epg_data)} EPG entries (from active sources only)")
|
||||
|
||||
# Run EPG matching with progress updates - automatically uses appropriate thresholds
|
||||
result = match_channels_to_epg(channels_data, epg_data, region_code, use_ml=True, send_progress=True)
|
||||
|
|
@ -749,9 +761,10 @@ def match_single_channel_epg(channel_id):
|
|||
test_normalized = normalize_name(test_name)
|
||||
logger.debug(f"DEBUG normalization example: '{test_name}' → '{test_normalized}' (call sign preserved)")
|
||||
|
||||
# Get all EPG data for matching - must include norm_name field
|
||||
# Get all EPG data for matching from active sources - must include norm_name field
|
||||
# Ordered by source priority (highest first) so we prefer higher priority matches
|
||||
epg_data_list = []
|
||||
for epg in EPGData.objects.filter(name__isnull=False).exclude(name=''):
|
||||
for epg in EPGData.objects.select_related('epg_source').filter(epg_source__is_active=True, name__isnull=False).exclude(name=''):
|
||||
normalized_epg_tvg_id = epg.tvg_id.strip().lower() if epg.tvg_id else ""
|
||||
epg_data_list.append({
|
||||
'id': epg.id,
|
||||
|
|
@ -760,10 +773,14 @@ def match_single_channel_epg(channel_id):
|
|||
'name': epg.name,
|
||||
'norm_name': normalize_name(epg.name),
|
||||
'epg_source_id': epg.epg_source.id if epg.epg_source else None,
|
||||
'epg_source_priority': epg.epg_source.priority if epg.epg_source else 0,
|
||||
})
|
||||
|
||||
# Sort EPG data by source priority (highest first) so we prefer higher priority matches
|
||||
epg_data_list.sort(key=lambda x: x['epg_source_priority'], reverse=True)
|
||||
|
||||
if not epg_data_list:
|
||||
return {"matched": False, "message": "No EPG data available for matching"}
|
||||
return {"matched": False, "message": "No EPG data available for matching (from active sources)"}
|
||||
|
||||
logger.info(f"Matching single channel '{channel.name}' against {len(epg_data_list)} EPG entries")
|
||||
|
||||
|
|
@ -1434,6 +1451,18 @@ def run_recording(recording_id, channel_id, start_time_str, end_time_str):
|
|||
|
||||
logger.info(f"Starting recording for channel {channel.name}")
|
||||
|
||||
# Log system event for recording start
|
||||
try:
|
||||
from core.utils import log_system_event
|
||||
log_system_event(
|
||||
'recording_start',
|
||||
channel_id=channel.uuid,
|
||||
channel_name=channel.name,
|
||||
recording_id=recording_id
|
||||
)
|
||||
except Exception as e:
|
||||
logger.error(f"Could not log recording start event: {e}")
|
||||
|
||||
# Try to resolve the Recording row up front
|
||||
recording_obj = None
|
||||
try:
|
||||
|
|
@ -1827,6 +1856,20 @@ def run_recording(recording_id, channel_id, start_time_str, end_time_str):
|
|||
# After the loop, the file and response are closed automatically.
|
||||
logger.info(f"Finished recording for channel {channel.name}")
|
||||
|
||||
# Log system event for recording end
|
||||
try:
|
||||
from core.utils import log_system_event
|
||||
log_system_event(
|
||||
'recording_end',
|
||||
channel_id=channel.uuid,
|
||||
channel_name=channel.name,
|
||||
recording_id=recording_id,
|
||||
interrupted=interrupted,
|
||||
bytes_written=bytes_written
|
||||
)
|
||||
except Exception as e:
|
||||
logger.error(f"Could not log recording end event: {e}")
|
||||
|
||||
# Remux TS to MKV container
|
||||
remux_success = False
|
||||
try:
|
||||
|
|
@ -2636,7 +2679,38 @@ def bulk_create_channels_from_streams(self, stream_ids, channel_profile_ids=None
|
|||
)
|
||||
|
||||
# Handle channel profile membership
|
||||
if profile_ids:
|
||||
# Semantics:
|
||||
# - None: add to ALL profiles (backward compatible default)
|
||||
# - Empty array []: add to NO profiles
|
||||
# - Sentinel [0] or 0 in array: add to ALL profiles (explicit)
|
||||
# - [1,2,...]: add to specified profile IDs only
|
||||
if profile_ids is None:
|
||||
# Omitted -> add to all profiles (backward compatible)
|
||||
all_profiles = ChannelProfile.objects.all()
|
||||
channel_profile_memberships.extend([
|
||||
ChannelProfileMembership(
|
||||
channel_profile=profile,
|
||||
channel=channel,
|
||||
enabled=True
|
||||
)
|
||||
for profile in all_profiles
|
||||
])
|
||||
elif isinstance(profile_ids, list) and len(profile_ids) == 0:
|
||||
# Empty array -> add to no profiles
|
||||
pass
|
||||
elif isinstance(profile_ids, list) and 0 in profile_ids:
|
||||
# Sentinel 0 -> add to all profiles (explicit)
|
||||
all_profiles = ChannelProfile.objects.all()
|
||||
channel_profile_memberships.extend([
|
||||
ChannelProfileMembership(
|
||||
channel_profile=profile,
|
||||
channel=channel,
|
||||
enabled=True
|
||||
)
|
||||
for profile in all_profiles
|
||||
])
|
||||
else:
|
||||
# Specific profile IDs
|
||||
try:
|
||||
specific_profiles = ChannelProfile.objects.filter(id__in=profile_ids)
|
||||
channel_profile_memberships.extend([
|
||||
|
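The branches above encode four distinct caller intents for channel_profile_ids. A small illustration of the accepted values, assuming the task is queued via Celery's delay() with hypothetical profile IDs:

# channel_profile_ids semantics for bulk_create_channels_from_streams:
bulk_create_channels_from_streams.delay(stream_ids)                             # None   -> all profiles
bulk_create_channels_from_streams.delay(stream_ids, channel_profile_ids=[])     # []     -> no profiles
bulk_create_channels_from_streams.delay(stream_ids, channel_profile_ids=[0])    # [0]    -> all profiles (explicit)
bulk_create_channels_from_streams.delay(stream_ids, channel_profile_ids=[1, 2]) # [1, 2] -> only those profiles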
|
@ -2652,17 +2726,6 @@ def bulk_create_channels_from_streams(self, stream_ids, channel_profile_ids=None
|
|||
'channel_id': channel.id,
|
||||
'error': f'Failed to add to profiles: {str(e)}'
|
||||
})
|
||||
else:
|
||||
# Add to all profiles by default
|
||||
all_profiles = ChannelProfile.objects.all()
|
||||
channel_profile_memberships.extend([
|
||||
ChannelProfileMembership(
|
||||
channel_profile=profile,
|
||||
channel=channel,
|
||||
enabled=True
|
||||
)
|
||||
for profile in all_profiles
|
||||
])
|
||||
|
||||
# Bulk update channels with logos
|
||||
if update:
|
||||
|
|
|
|||
apps/channels/tests/test_channel_api.py (new file, 211 lines)
|
|
@ -0,0 +1,211 @@
|
|||
from django.test import TestCase
|
||||
from django.contrib.auth import get_user_model
|
||||
from rest_framework.test import APIClient
|
||||
from rest_framework import status
|
||||
|
||||
from apps.channels.models import Channel, ChannelGroup
|
||||
|
||||
User = get_user_model()
|
||||
|
||||
|
||||
class ChannelBulkEditAPITests(TestCase):
|
||||
def setUp(self):
|
||||
# Create a test admin user (user_level >= 10) and authenticate
|
||||
self.user = User.objects.create_user(username="testuser", password="testpass123")
|
||||
self.user.user_level = 10 # Set admin level
|
||||
self.user.save()
|
||||
self.client = APIClient()
|
||||
self.client.force_authenticate(user=self.user)
|
||||
self.bulk_edit_url = "/api/channels/channels/edit/bulk/"
|
||||
|
||||
# Create test channel group
|
||||
self.group1 = ChannelGroup.objects.create(name="Test Group 1")
|
||||
self.group2 = ChannelGroup.objects.create(name="Test Group 2")
|
||||
|
||||
# Create test channels
|
||||
self.channel1 = Channel.objects.create(
|
||||
channel_number=1.0,
|
||||
name="Channel 1",
|
||||
tvg_id="channel1",
|
||||
channel_group=self.group1
|
||||
)
|
||||
self.channel2 = Channel.objects.create(
|
||||
channel_number=2.0,
|
||||
name="Channel 2",
|
||||
tvg_id="channel2",
|
||||
channel_group=self.group1
|
||||
)
|
||||
self.channel3 = Channel.objects.create(
|
||||
channel_number=3.0,
|
||||
name="Channel 3",
|
||||
tvg_id="channel3"
|
||||
)
|
||||
|
||||
def test_bulk_edit_success(self):
|
||||
"""Test successful bulk update of multiple channels"""
|
||||
data = [
|
||||
{"id": self.channel1.id, "name": "Updated Channel 1"},
|
||||
{"id": self.channel2.id, "name": "Updated Channel 2", "channel_number": 22.0},
|
||||
]
|
||||
|
||||
response = self.client.patch(self.bulk_edit_url, data, format="json")
|
||||
|
||||
self.assertEqual(response.status_code, status.HTTP_200_OK)
|
||||
self.assertEqual(response.data["message"], "Successfully updated 2 channels")
|
||||
self.assertEqual(len(response.data["channels"]), 2)
|
||||
|
||||
# Verify database changes
|
||||
self.channel1.refresh_from_db()
|
||||
self.channel2.refresh_from_db()
|
||||
self.assertEqual(self.channel1.name, "Updated Channel 1")
|
||||
self.assertEqual(self.channel2.name, "Updated Channel 2")
|
||||
self.assertEqual(self.channel2.channel_number, 22.0)
|
||||
|
||||
def test_bulk_edit_with_empty_validated_data_first(self):
|
||||
"""
|
||||
Test the bug fix: when first channel has empty validated_data.
|
||||
This was causing: ValueError: Field names must be given to bulk_update()
|
||||
"""
|
||||
# Create a channel with data that will be "unchanged" (empty validated_data)
|
||||
# We'll send the same data it already has
|
||||
data = [
|
||||
# First channel: no actual changes (this would create empty validated_data)
|
||||
{"id": self.channel1.id},
|
||||
# Second channel: has changes
|
||||
{"id": self.channel2.id, "name": "Updated Channel 2"},
|
||||
]
|
||||
|
||||
response = self.client.patch(self.bulk_edit_url, data, format="json")
|
||||
|
||||
# Should not crash with ValueError
|
||||
self.assertEqual(response.status_code, status.HTTP_200_OK)
|
||||
self.assertEqual(response.data["message"], "Successfully updated 2 channels")
|
||||
|
||||
# Verify the channel with changes was updated
|
||||
self.channel2.refresh_from_db()
|
||||
self.assertEqual(self.channel2.name, "Updated Channel 2")
|
||||
|
||||
def test_bulk_edit_all_empty_updates(self):
|
||||
"""Test when all channels have empty updates (no actual changes)"""
|
||||
data = [
|
||||
{"id": self.channel1.id},
|
||||
{"id": self.channel2.id},
|
||||
]
|
||||
|
||||
response = self.client.patch(self.bulk_edit_url, data, format="json")
|
||||
|
||||
# Should succeed without calling bulk_update
|
||||
self.assertEqual(response.status_code, status.HTTP_200_OK)
|
||||
self.assertEqual(response.data["message"], "Successfully updated 2 channels")
|
||||
|
||||
def test_bulk_edit_mixed_fields(self):
|
||||
"""Test bulk update where different channels update different fields"""
|
||||
data = [
|
||||
{"id": self.channel1.id, "name": "New Name 1"},
|
||||
{"id": self.channel2.id, "channel_number": 99.0},
|
||||
{"id": self.channel3.id, "tvg_id": "new_tvg_id", "name": "New Name 3"},
|
||||
]
|
||||
|
||||
response = self.client.patch(self.bulk_edit_url, data, format="json")
|
||||
|
||||
self.assertEqual(response.status_code, status.HTTP_200_OK)
|
||||
self.assertEqual(response.data["message"], "Successfully updated 3 channels")
|
||||
|
||||
# Verify all updates
|
||||
self.channel1.refresh_from_db()
|
||||
self.channel2.refresh_from_db()
|
||||
self.channel3.refresh_from_db()
|
||||
|
||||
self.assertEqual(self.channel1.name, "New Name 1")
|
||||
self.assertEqual(self.channel2.channel_number, 99.0)
|
||||
self.assertEqual(self.channel3.tvg_id, "new_tvg_id")
|
||||
self.assertEqual(self.channel3.name, "New Name 3")
|
||||
|
||||
def test_bulk_edit_with_channel_group(self):
|
||||
"""Test bulk update with channel_group_id changes"""
|
||||
data = [
|
||||
{"id": self.channel1.id, "channel_group_id": self.group2.id},
|
||||
{"id": self.channel3.id, "channel_group_id": self.group1.id},
|
||||
]
|
||||
|
||||
response = self.client.patch(self.bulk_edit_url, data, format="json")
|
||||
|
||||
self.assertEqual(response.status_code, status.HTTP_200_OK)
|
||||
|
||||
# Verify group changes
|
||||
self.channel1.refresh_from_db()
|
||||
self.channel3.refresh_from_db()
|
||||
self.assertEqual(self.channel1.channel_group, self.group2)
|
||||
self.assertEqual(self.channel3.channel_group, self.group1)
|
||||
|
||||
def test_bulk_edit_nonexistent_channel(self):
|
||||
"""Test bulk update with a channel that doesn't exist"""
|
||||
nonexistent_id = 99999
|
||||
data = [
|
||||
{"id": nonexistent_id, "name": "Should Fail"},
|
||||
{"id": self.channel1.id, "name": "Should Still Update"},
|
||||
]
|
||||
|
||||
response = self.client.patch(self.bulk_edit_url, data, format="json")
|
||||
|
||||
# Should return 400 with errors
|
||||
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
|
||||
self.assertIn("errors", response.data)
|
||||
self.assertEqual(len(response.data["errors"]), 1)
|
||||
self.assertEqual(response.data["errors"][0]["channel_id"], nonexistent_id)
|
||||
self.assertEqual(response.data["errors"][0]["error"], "Channel not found")
|
||||
|
||||
# The valid channel should still be updated
|
||||
self.assertEqual(response.data["updated_count"], 1)
|
||||
|
||||
def test_bulk_edit_validation_error(self):
|
||||
"""Test bulk update with invalid data (validation error)"""
|
||||
data = [
|
||||
{"id": self.channel1.id, "channel_number": "invalid_number"},
|
||||
]
|
||||
|
||||
response = self.client.patch(self.bulk_edit_url, data, format="json")
|
||||
|
||||
# Should return 400 with validation errors
|
||||
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
|
||||
self.assertIn("errors", response.data)
|
||||
self.assertEqual(len(response.data["errors"]), 1)
|
||||
self.assertIn("channel_number", response.data["errors"][0]["errors"])
|
||||
|
||||
def test_bulk_edit_empty_channel_updates(self):
|
||||
"""Test bulk update with empty list"""
|
||||
data = []
|
||||
|
||||
response = self.client.patch(self.bulk_edit_url, data, format="json")
|
||||
|
||||
# Empty list is accepted and returns success with 0 updates
|
||||
self.assertEqual(response.status_code, status.HTTP_200_OK)
|
||||
self.assertEqual(response.data["message"], "Successfully updated 0 channels")
|
||||
|
||||
def test_bulk_edit_missing_channel_updates(self):
|
||||
"""Test bulk update without proper format (dict instead of list)"""
|
||||
data = {"channel_updates": {}}
|
||||
|
||||
response = self.client.patch(self.bulk_edit_url, data, format="json")
|
||||
|
||||
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
|
||||
self.assertEqual(response.data["error"], "Expected a list of channel updates")
|
||||
|
||||
def test_bulk_edit_preserves_other_fields(self):
|
||||
"""Test that bulk update only changes specified fields"""
|
||||
original_channel_number = self.channel1.channel_number
|
||||
original_tvg_id = self.channel1.tvg_id
|
||||
|
||||
data = [
|
||||
{"id": self.channel1.id, "name": "Only Name Changed"},
|
||||
]
|
||||
|
||||
response = self.client.patch(self.bulk_edit_url, data, format="json")
|
||||
|
||||
self.assertEqual(response.status_code, status.HTTP_200_OK)
|
||||
|
||||
# Verify only name changed, other fields preserved
|
||||
self.channel1.refresh_from_db()
|
||||
self.assertEqual(self.channel1.name, "Only Name Changed")
|
||||
self.assertEqual(self.channel1.channel_number, original_channel_number)
|
||||
self.assertEqual(self.channel1.tvg_id, original_tvg_id)
|
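The tests above pin down the request shape for the bulk edit endpoint: a PATCH to /api/channels/channels/edit/bulk/ with a JSON list in which each item carries an id plus only the fields to change. A minimal client-side sketch, with a hypothetical host and auth scheme:

import requests

payload = [
    {"id": 12, "name": "Updated Channel"},
    {"id": 15, "channel_number": 22.0},
]
resp = requests.patch(
    "http://dispatcharr.local/api/channels/channels/edit/bulk/",  # hypothetical host
    json=payload,
    headers={"Authorization": "Bearer <token>"},  # hypothetical auth scheme
)
# Expected: 200 with {"message": "Successfully updated 2 channels", ...};
# unknown ids or invalid values come back as 400 with an "errors" list.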
||||
apps/epg/migrations/0021_epgsource_priority.py (new file, 18 lines)
|
|
@ -0,0 +1,18 @@
|
|||
# Generated by Django 5.2.4 on 2025-12-05 15:24
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('epg', '0020_migrate_time_to_starttime_placeholders'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name='epgsource',
|
||||
name='priority',
|
||||
field=models.PositiveIntegerField(default=0, help_text='Priority for EPG matching (higher numbers = higher priority). Used when multiple EPG sources have matching entries for a channel.'),
|
||||
),
|
||||
]
|
||||
|
|
@ -45,6 +45,10 @@ class EPGSource(models.Model):
|
|||
null=True,
|
||||
help_text="Custom properties for dummy EPG configuration (regex patterns, timezone, duration, etc.)"
|
||||
)
|
||||
priority = models.PositiveIntegerField(
|
||||
default=0,
|
||||
help_text="Priority for EPG matching (higher numbers = higher priority). Used when multiple EPG sources have matching entries for a channel."
|
||||
)
|
||||
status = models.CharField(
|
||||
max_length=20,
|
||||
choices=STATUS_CHOICES,
|
||||
|
|
@ -24,6 +24,7 @@ class EPGSourceSerializer(serializers.ModelSerializer):
|
|||
'is_active',
|
||||
'file_path',
|
||||
'refresh_interval',
|
||||
'priority',
|
||||
'status',
|
||||
'last_message',
|
||||
'created_at',
|
||||
|
|
@ -24,7 +24,7 @@ from asgiref.sync import async_to_sync
|
|||
from channels.layers import get_channel_layer
|
||||
|
||||
from .models import EPGSource, EPGData, ProgramData
|
||||
from core.utils import acquire_task_lock, release_task_lock, send_websocket_update, cleanup_memory
|
||||
from core.utils import acquire_task_lock, release_task_lock, send_websocket_update, cleanup_memory, log_system_event
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
|
@ -286,11 +286,12 @@ def fetch_xmltv(source):
|
|||
logger.info(f"Fetching XMLTV data from source: {source.name}")
|
||||
try:
|
||||
# Get default user agent from settings
|
||||
default_user_agent_setting = CoreSettings.objects.filter(key='default-user-agent').first()
|
||||
stream_settings = CoreSettings.get_stream_settings()
|
||||
user_agent = "Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:138.0) Gecko/20100101 Firefox/138.0" # Fallback default
|
||||
if default_user_agent_setting and default_user_agent_setting.value:
|
||||
default_user_agent_id = stream_settings.get('default_user_agent')
|
||||
if default_user_agent_id:
|
||||
try:
|
||||
user_agent_obj = UserAgent.objects.filter(id=int(default_user_agent_setting.value)).first()
|
||||
user_agent_obj = UserAgent.objects.filter(id=int(default_user_agent_id)).first()
|
||||
if user_agent_obj and user_agent_obj.user_agent:
|
||||
user_agent = user_agent_obj.user_agent
|
||||
logger.debug(f"Using default user agent: {user_agent}")
|
||||
|
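The change above (repeated later for Schedules Direct) swaps the old per-key CoreSettings lookup for CoreSettings.get_stream_settings(). A condensed sketch of the resolution order, assuming get_stream_settings() returns a dict carrying a 'default_user_agent' id as shown in the diff:

from core.models import CoreSettings, UserAgent

FALLBACK_UA = "Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:138.0) Gecko/20100101 Firefox/138.0"

def resolve_default_user_agent():
    """Return the configured default UserAgent string, falling back to a static UA."""
    user_agent = FALLBACK_UA
    default_user_agent_id = CoreSettings.get_stream_settings().get('default_user_agent')
    if default_user_agent_id:
        try:
            obj = UserAgent.objects.filter(id=int(default_user_agent_id)).first()
            if obj and obj.user_agent:
                user_agent = obj.user_agent
        except (TypeError, ValueError):
            pass  # malformed id -> keep fallback
    return user_agent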
|
@ -1393,11 +1394,23 @@ def parse_programs_for_tvg_id(epg_id):
|
|||
|
||||
|
||||
def parse_programs_for_source(epg_source, tvg_id=None):
|
||||
"""
|
||||
Parse programs for all MAPPED channels from an EPG source in a single pass.
|
||||
|
||||
This is an optimized version that:
|
||||
1. Only processes EPG entries that are actually mapped to channels
|
||||
2. Parses the XML file ONCE instead of once per channel
|
||||
3. Skips programmes for unmapped channels entirely during parsing
|
||||
|
||||
This dramatically improves performance when an EPG source has many channels
|
||||
but only a fraction are mapped.
|
||||
"""
|
||||
# Send initial programs parsing notification
|
||||
send_epg_update(epg_source.id, "parsing_programs", 0)
|
||||
should_log_memory = False
|
||||
process = None
|
||||
initial_memory = 0
|
||||
source_file = None
|
||||
|
||||
# Add memory tracking only in trace mode or higher
|
||||
try:
|
||||
|
|
@ -1417,91 +1430,251 @@ def parse_programs_for_source(epg_source, tvg_id=None):
|
|||
should_log_memory = False
|
||||
|
||||
try:
|
||||
# Process EPG entries in batches rather than all at once
|
||||
batch_size = 20 # Process fewer channels at once to reduce memory usage
|
||||
epg_count = EPGData.objects.filter(epg_source=epg_source).count()
|
||||
# Only get EPG entries that are actually mapped to channels
|
||||
mapped_epg_ids = set(
|
||||
Channel.objects.filter(
|
||||
epg_data__epg_source=epg_source,
|
||||
epg_data__isnull=False
|
||||
).values_list('epg_data_id', flat=True)
|
||||
)
|
||||
|
||||
if epg_count == 0:
|
||||
logger.info(f"No EPG entries found for source: {epg_source.name}")
|
||||
# Update status - this is not an error, just no entries
|
||||
if not mapped_epg_ids:
|
||||
total_epg_count = EPGData.objects.filter(epg_source=epg_source).count()
|
||||
logger.info(f"No channels mapped to any EPG entries from source: {epg_source.name} "
|
||||
f"(source has {total_epg_count} EPG entries, 0 mapped)")
|
||||
# Update status - this is not an error, just no mapped entries
|
||||
epg_source.status = 'success'
|
||||
epg_source.save(update_fields=['status'])
|
||||
epg_source.last_message = f"No channels mapped to this EPG source ({total_epg_count} entries available)"
|
||||
epg_source.save(update_fields=['status', 'last_message'])
|
||||
send_epg_update(epg_source.id, "parsing_programs", 100, status="success")
|
||||
return True
|
||||
|
||||
logger.info(f"Parsing programs for {epg_count} EPG entries from source: {epg_source.name}")
|
||||
# Get the mapped EPG entries with their tvg_ids
|
||||
mapped_epgs = EPGData.objects.filter(id__in=mapped_epg_ids).values('id', 'tvg_id')
|
||||
tvg_id_to_epg_id = {epg['tvg_id']: epg['id'] for epg in mapped_epgs if epg['tvg_id']}
|
||||
mapped_tvg_ids = set(tvg_id_to_epg_id.keys())
|
||||
|
||||
failed_entries = []
|
||||
program_count = 0
|
||||
channel_count = 0
|
||||
updated_count = 0
|
||||
processed = 0
|
||||
# Process in batches using cursor-based approach to limit memory usage
|
||||
last_id = 0
|
||||
while True:
|
||||
# Get a batch of EPG entries
|
||||
batch_entries = list(EPGData.objects.filter(
|
||||
epg_source=epg_source,
|
||||
id__gt=last_id
|
||||
).order_by('id')[:batch_size])
|
||||
total_epg_count = EPGData.objects.filter(epg_source=epg_source).count()
|
||||
mapped_count = len(mapped_tvg_ids)
|
||||
|
||||
if not batch_entries:
|
||||
break # No more entries to process
|
||||
logger.info(f"Parsing programs for {mapped_count} MAPPED channels from source: {epg_source.name} "
|
||||
f"(skipping {total_epg_count - mapped_count} unmapped EPG entries)")
|
||||
|
||||
# Update last_id for next iteration
|
||||
last_id = batch_entries[-1].id
|
||||
# Get the file path
|
||||
file_path = epg_source.extracted_file_path if epg_source.extracted_file_path else epg_source.file_path
|
||||
if not file_path:
|
||||
file_path = epg_source.get_cache_file()
|
||||
|
||||
# Process this batch
|
||||
for epg in batch_entries:
|
||||
if epg.tvg_id:
|
||||
try:
|
||||
result = parse_programs_for_tvg_id(epg.id)
|
||||
if result == "Task already running":
|
||||
logger.info(f"Program parse for {epg.id} already in progress, skipping")
|
||||
# Check if the file exists
|
||||
if not os.path.exists(file_path):
|
||||
logger.error(f"EPG file not found at: {file_path}")
|
||||
|
||||
processed += 1
|
||||
progress = min(95, int((processed / epg_count) * 100)) if epg_count > 0 else 50
|
||||
send_epg_update(epg_source.id, "parsing_programs", progress)
|
||||
except Exception as e:
|
||||
logger.error(f"Error parsing programs for tvg_id={epg.tvg_id}: {e}", exc_info=True)
|
||||
failed_entries.append(f"{epg.tvg_id}: {str(e)}")
|
||||
if epg_source.url:
|
||||
# Update the file path in the database
|
||||
new_path = epg_source.get_cache_file()
|
||||
logger.info(f"Updating file_path from '{file_path}' to '{new_path}'")
|
||||
epg_source.file_path = new_path
|
||||
epg_source.save(update_fields=['file_path'])
|
||||
logger.info(f"Fetching new EPG data from URL: {epg_source.url}")
|
||||
|
||||
# Force garbage collection after each batch
|
||||
batch_entries = None # Remove reference to help garbage collection
|
||||
# Fetch new data before continuing
|
||||
fetch_success = fetch_xmltv(epg_source)
|
||||
|
||||
if not fetch_success:
|
||||
logger.error(f"Failed to fetch EPG data for source: {epg_source.name}")
|
||||
epg_source.status = 'error'
|
||||
epg_source.last_message = f"Failed to download EPG data"
|
||||
epg_source.save(update_fields=['status', 'last_message'])
|
||||
send_epg_update(epg_source.id, "parsing_programs", 100, status="error", error="Failed to download EPG file")
|
||||
return False
|
||||
|
||||
# Update file_path with the new location
|
||||
file_path = epg_source.extracted_file_path if epg_source.extracted_file_path else epg_source.file_path
|
||||
else:
|
||||
logger.error(f"No URL provided for EPG source {epg_source.name}, cannot fetch new data")
|
||||
epg_source.status = 'error'
|
||||
epg_source.last_message = f"No URL provided, cannot fetch EPG data"
|
||||
epg_source.save(update_fields=['status', 'last_message'])
|
||||
send_epg_update(epg_source.id, "parsing_programs", 100, status="error", error="No URL provided")
|
||||
return False
|
||||
|
||||
# SINGLE PASS PARSING: Parse the XML file once and collect all programs in memory
|
||||
# We parse FIRST, then do an atomic delete+insert to avoid race conditions
|
||||
# where clients might see empty/partial EPG data during the transition
|
||||
all_programs_to_create = []
|
||||
programs_by_channel = {tvg_id: 0 for tvg_id in mapped_tvg_ids} # Track count per channel
|
||||
total_programs = 0
|
||||
skipped_programs = 0
|
||||
last_progress_update = 0
|
||||
|
||||
try:
|
||||
logger.debug(f"Opening file for single-pass parsing: {file_path}")
|
||||
source_file = open(file_path, 'rb')
|
||||
|
||||
# Stream parse the file using lxml's iterparse
|
||||
program_parser = etree.iterparse(source_file, events=('end',), tag='programme', remove_blank_text=True, recover=True)
|
||||
|
||||
for _, elem in program_parser:
|
||||
channel_id = elem.get('channel')
|
||||
|
||||
# Skip programmes for unmapped channels immediately
|
||||
if channel_id not in mapped_tvg_ids:
|
||||
skipped_programs += 1
|
||||
# Clear element to free memory
|
||||
clear_element(elem)
|
||||
continue
|
||||
|
||||
# This programme is for a mapped channel - process it
|
||||
try:
|
||||
start_time = parse_xmltv_time(elem.get('start'))
|
||||
end_time = parse_xmltv_time(elem.get('stop'))
|
||||
title = None
|
||||
desc = None
|
||||
sub_title = None
|
||||
|
||||
# Efficiently process child elements
|
||||
for child in elem:
|
||||
if child.tag == 'title':
|
||||
title = child.text or 'No Title'
|
||||
elif child.tag == 'desc':
|
||||
desc = child.text or ''
|
||||
elif child.tag == 'sub-title':
|
||||
sub_title = child.text or ''
|
||||
|
||||
if not title:
|
||||
title = 'No Title'
|
||||
|
||||
# Extract custom properties
|
||||
custom_props = extract_custom_properties(elem)
|
||||
custom_properties_json = custom_props if custom_props else None
|
||||
|
||||
epg_id = tvg_id_to_epg_id[channel_id]
|
||||
all_programs_to_create.append(ProgramData(
|
||||
epg_id=epg_id,
|
||||
start_time=start_time,
|
||||
end_time=end_time,
|
||||
title=title,
|
||||
description=desc,
|
||||
sub_title=sub_title,
|
||||
tvg_id=channel_id,
|
||||
custom_properties=custom_properties_json
|
||||
))
|
||||
total_programs += 1
|
||||
programs_by_channel[channel_id] += 1
|
||||
|
||||
# Clear the element to free memory
|
||||
clear_element(elem)
|
||||
|
||||
# Send progress update (estimate based on programs processed)
|
||||
if total_programs - last_progress_update >= 5000:
|
||||
last_progress_update = total_programs
|
||||
# Cap at 70% during parsing phase (save 30% for DB operations)
|
||||
progress = min(70, 10 + int((total_programs / max(total_programs + 10000, 1)) * 60))
|
||||
send_epg_update(epg_source.id, "parsing_programs", progress,
|
||||
processed=total_programs, channels=mapped_count)
|
||||
|
||||
# Periodic garbage collection during parsing
|
||||
if total_programs % 5000 == 0:
|
||||
gc.collect()
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Error processing program for {channel_id}: {e}", exc_info=True)
|
||||
clear_element(elem)
|
||||
continue
|
||||
|
||||
except etree.XMLSyntaxError as xml_error:
|
||||
logger.error(f"XML syntax error parsing program data: {xml_error}")
|
||||
epg_source.status = EPGSource.STATUS_ERROR
|
||||
epg_source.last_message = f"XML parsing error: {str(xml_error)}"
|
||||
epg_source.save(update_fields=['status', 'last_message'])
|
||||
send_epg_update(epg_source.id, "parsing_programs", 100, status="error", message=str(xml_error))
|
||||
return False
|
||||
except Exception as e:
|
||||
logger.error(f"Error parsing XML for programs: {e}", exc_info=True)
|
||||
raise
|
||||
finally:
|
||||
if source_file:
|
||||
source_file.close()
|
||||
source_file = None
|
||||
|
||||
# Now perform atomic delete + bulk insert
|
||||
# This ensures clients never see empty/partial EPG data
|
||||
logger.info(f"Parsed {total_programs} programs, performing atomic database update...")
|
||||
send_epg_update(epg_source.id, "parsing_programs", 75, message="Updating database...")
|
||||
|
||||
batch_size = 1000
|
||||
try:
|
||||
with transaction.atomic():
|
||||
# Delete existing programs for mapped EPGs
|
||||
deleted_count = ProgramData.objects.filter(epg_id__in=mapped_epg_ids).delete()[0]
|
||||
logger.debug(f"Deleted {deleted_count} existing programs")
|
||||
|
||||
# Clean up orphaned programs for unmapped EPG entries
|
||||
unmapped_epg_ids = list(EPGData.objects.filter(
|
||||
epg_source=epg_source
|
||||
).exclude(id__in=mapped_epg_ids).values_list('id', flat=True))
|
||||
|
||||
if unmapped_epg_ids:
|
||||
orphaned_count = ProgramData.objects.filter(epg_id__in=unmapped_epg_ids).delete()[0]
|
||||
if orphaned_count > 0:
|
||||
logger.info(f"Cleaned up {orphaned_count} orphaned programs for {len(unmapped_epg_ids)} unmapped EPG entries")
|
||||
|
||||
# Bulk insert all new programs in batches within the same transaction
|
||||
for i in range(0, len(all_programs_to_create), batch_size):
|
||||
batch = all_programs_to_create[i:i + batch_size]
|
||||
ProgramData.objects.bulk_create(batch)
|
||||
|
||||
# Update progress during insertion
|
||||
progress = 75 + int((i / len(all_programs_to_create)) * 20) if all_programs_to_create else 95
|
||||
if i % (batch_size * 5) == 0:
|
||||
send_epg_update(epg_source.id, "parsing_programs", min(95, progress),
|
||||
message=f"Inserting programs... {i}/{len(all_programs_to_create)}")
|
||||
|
||||
logger.info(f"Atomic update complete: deleted {deleted_count}, inserted {total_programs} programs")
|
||||
|
||||
except Exception as db_error:
|
||||
logger.error(f"Database error during atomic update: {db_error}", exc_info=True)
|
||||
epg_source.status = EPGSource.STATUS_ERROR
|
||||
epg_source.last_message = f"Database error: {str(db_error)}"
|
||||
epg_source.save(update_fields=['status', 'last_message'])
|
||||
send_epg_update(epg_source.id, "parsing_programs", 100, status="error", message=str(db_error))
|
||||
return False
|
||||
finally:
|
||||
# Clear the large list to free memory
|
||||
all_programs_to_create = None
|
||||
gc.collect()
|
||||
|
||||
# If there were failures, include them in the message but continue
|
||||
if failed_entries:
|
||||
epg_source.status = EPGSource.STATUS_SUCCESS # Still mark as success if some processed
|
||||
error_summary = f"Failed to parse {len(failed_entries)} of {epg_count} entries"
|
||||
stats_summary = f"Processed {program_count} programs across {channel_count} channels. Updated: {updated_count}."
|
||||
epg_source.last_message = f"{stats_summary} Warning: {error_summary}"
|
||||
epg_source.updated_at = timezone.now()
|
||||
epg_source.save(update_fields=['status', 'last_message', 'updated_at'])
|
||||
# Count channels that actually got programs
|
||||
channels_with_programs = sum(1 for count in programs_by_channel.values() if count > 0)
|
||||
|
||||
# Send completion notification with mixed status
|
||||
send_epg_update(epg_source.id, "parsing_programs", 100,
|
||||
status="success",
|
||||
message=epg_source.last_message)
|
||||
|
||||
# Explicitly release memory of large lists before returning
|
||||
del failed_entries
|
||||
gc.collect()
|
||||
|
||||
return True
|
||||
|
||||
# If all successful, set a comprehensive success message
|
||||
# Success message
|
||||
epg_source.status = EPGSource.STATUS_SUCCESS
|
||||
epg_source.last_message = f"Successfully processed {program_count} programs across {channel_count} channels. Updated: {updated_count}."
|
||||
epg_source.last_message = (
|
||||
f"Parsed {total_programs:,} programs for {channels_with_programs} channels "
|
||||
f"(skipped {skipped_programs:,} programs for {total_epg_count - mapped_count} unmapped channels)"
|
||||
)
|
||||
epg_source.updated_at = timezone.now()
|
||||
epg_source.save(update_fields=['status', 'last_message', 'updated_at'])
|
||||
|
||||
# Log system event for EPG refresh
|
||||
log_system_event(
|
||||
event_type='epg_refresh',
|
||||
source_name=epg_source.name,
|
||||
programs=total_programs,
|
||||
channels=channels_with_programs,
|
||||
skipped_programs=skipped_programs,
|
||||
unmapped_channels=total_epg_count - mapped_count,
|
||||
)
|
||||
|
||||
# Send completion notification with status
|
||||
send_epg_update(epg_source.id, "parsing_programs", 100,
|
||||
status="success",
|
||||
message=epg_source.last_message)
|
||||
message=epg_source.last_message,
|
||||
updated_at=epg_source.updated_at.isoformat())
|
||||
|
||||
logger.info(f"Completed parsing all programs for source: {epg_source.name}")
|
||||
logger.info(f"Completed parsing programs for source: {epg_source.name} - "
|
||||
f"{total_programs:,} programs for {channels_with_programs} channels, "
|
||||
f"skipped {skipped_programs:,} programs for unmapped channels")
|
||||
return True
|
||||
|
||||
except Exception as e:
|
||||
|
|
@ -1516,14 +1689,19 @@ def parse_programs_for_source(epg_source, tvg_id=None):
|
|||
return False
|
||||
finally:
|
||||
# Final memory cleanup and tracking
|
||||
|
||||
if source_file:
|
||||
try:
|
||||
source_file.close()
|
||||
except:
|
||||
pass
|
||||
source_file = None
|
||||
|
||||
# Explicitly release any remaining large data structures
|
||||
failed_entries = None
|
||||
program_count = None
|
||||
channel_count = None
|
||||
updated_count = None
|
||||
processed = None
|
||||
programs_to_create = None
|
||||
programs_by_channel = None
|
||||
mapped_epg_ids = None
|
||||
mapped_tvg_ids = None
|
||||
tvg_id_to_epg_id = None
|
||||
gc.collect()
|
||||
|
||||
# Add comprehensive memory cleanup at the end
|
||||
|
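The rewritten parse_programs_for_source boils down to two phases: a single streaming pass over the XMLTV file that keeps only programmes whose channel attribute belongs to a mapped EPG entry, followed by an atomic delete-and-bulk-insert so readers never observe a half-populated guide. A stripped-down sketch of that shape, reusing the module's ProgramData model and parse_xmltv_time helper and omitting the error handling, progress updates, and memory bookkeeping shown above:

from lxml import etree
from django.db import transaction

def single_pass_refresh(file_path, tvg_id_to_epg_id):
    mapped_tvg_ids = set(tvg_id_to_epg_id)
    programs = []
    with open(file_path, 'rb') as fh:
        for _, elem in etree.iterparse(fh, events=('end',), tag='programme', recover=True):
            channel_id = elem.get('channel')
            if channel_id in mapped_tvg_ids:
                programs.append(ProgramData(
                    epg_id=tvg_id_to_epg_id[channel_id],
                    start_time=parse_xmltv_time(elem.get('start')),
                    end_time=parse_xmltv_time(elem.get('stop')),
                    title=(elem.findtext('title') or 'No Title'),
                    tvg_id=channel_id,
                ))
            elem.clear()  # keep memory flat while streaming

    # Swap the old programmes for the new ones in one transaction.
    with transaction.atomic():
        ProgramData.objects.filter(epg_id__in=tvg_id_to_epg_id.values()).delete()
        ProgramData.objects.bulk_create(programs, batch_size=1000)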
|
@ -1537,12 +1715,13 @@ def fetch_schedules_direct(source):
|
|||
logger.info(f"Fetching Schedules Direct data from source: {source.name}")
|
||||
try:
|
||||
# Get default user agent from settings
|
||||
default_user_agent_setting = CoreSettings.objects.filter(key='default-user-agent').first()
|
||||
stream_settings = CoreSettings.get_stream_settings()
|
||||
user_agent = "Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:138.0) Gecko/20100101 Firefox/138.0" # Fallback default
|
||||
default_user_agent_id = stream_settings.get('default_user_agent')
|
||||
|
||||
if default_user_agent_setting and default_user_agent_setting.value:
|
||||
if default_user_agent_id:
|
||||
try:
|
||||
user_agent_obj = UserAgent.objects.filter(id=int(default_user_agent_setting.value)).first()
|
||||
user_agent_obj = UserAgent.objects.filter(id=int(default_user_agent_id)).first()
|
||||
if user_agent_obj and user_agent_obj.user_agent:
|
||||
user_agent = user_agent_obj.user_agent
|
||||
logger.debug(f"Using default user agent: {user_agent}")
|
||||
|
|
@ -152,6 +152,46 @@ class M3UAccountViewSet(viewsets.ModelViewSet):
|
|||
and not old_vod_enabled
|
||||
and new_vod_enabled
|
||||
):
|
||||
# Create Uncategorized categories immediately so they're available in the UI
|
||||
from apps.vod.models import VODCategory, M3UVODCategoryRelation
|
||||
|
||||
# Create movie Uncategorized category
|
||||
movie_category, _ = VODCategory.objects.get_or_create(
|
||||
name="Uncategorized",
|
||||
category_type="movie",
|
||||
defaults={}
|
||||
)
|
||||
|
||||
# Create series Uncategorized category
|
||||
series_category, _ = VODCategory.objects.get_or_create(
|
||||
name="Uncategorized",
|
||||
category_type="series",
|
||||
defaults={}
|
||||
)
|
||||
|
||||
# Create relations for both categories (disabled by default until first refresh)
|
||||
account_custom_props = instance.custom_properties or {}
|
||||
auto_enable_new = account_custom_props.get("auto_enable_new_groups_vod", True)
|
||||
|
||||
M3UVODCategoryRelation.objects.get_or_create(
|
||||
category=movie_category,
|
||||
m3u_account=instance,
|
||||
defaults={
|
||||
'enabled': auto_enable_new,
|
||||
'custom_properties': {}
|
||||
}
|
||||
)
|
||||
|
||||
M3UVODCategoryRelation.objects.get_or_create(
|
||||
category=series_category,
|
||||
m3u_account=instance,
|
||||
defaults={
|
||||
'enabled': auto_enable_new,
|
||||
'custom_properties': {}
|
||||
}
|
||||
)
|
||||
|
||||
# Trigger full VOD refresh
|
||||
from apps.vod.tasks import refresh_vod_content
|
||||
|
||||
refresh_vod_content.delay(instance.id)
|
||||
|
|
@ -24,6 +24,7 @@ from core.utils import (
|
|||
acquire_task_lock,
|
||||
release_task_lock,
|
||||
natural_sort_key,
|
||||
log_system_event,
|
||||
)
|
||||
from core.models import CoreSettings, UserAgent
|
||||
from asgiref.sync import async_to_sync
|
||||
|
|
@ -512,7 +513,19 @@ def check_field_lengths(streams_to_create):
|
|||
|
||||
|
||||
@shared_task
|
||||
def process_groups(account, groups):
|
||||
def process_groups(account, groups, scan_start_time=None):
|
||||
"""Process groups and update their relationships with the M3U account.
|
||||
|
||||
Args:
|
||||
account: M3UAccount instance
|
||||
groups: Dict of group names to custom properties
|
||||
scan_start_time: Timestamp when the scan started (for consistent last_seen marking)
|
||||
"""
|
||||
# Use scan_start_time if provided, otherwise current time
|
||||
# This ensures consistency with stream processing and cleanup logic
|
||||
if scan_start_time is None:
|
||||
scan_start_time = timezone.now()
|
||||
|
||||
existing_groups = {
|
||||
group.name: group
|
||||
for group in ChannelGroup.objects.filter(name__in=groups.keys())
|
||||
|
|
@ -552,24 +565,8 @@ def process_groups(account, groups):
|
|||
).select_related('channel_group')
|
||||
}
|
||||
|
||||
# Get ALL existing relationships for this account to identify orphaned ones
|
||||
all_existing_relationships = {
|
||||
rel.channel_group.name: rel
|
||||
for rel in ChannelGroupM3UAccount.objects.filter(
|
||||
m3u_account=account
|
||||
).select_related('channel_group')
|
||||
}
|
||||
|
||||
relations_to_create = []
|
||||
relations_to_update = []
|
||||
relations_to_delete = []
|
||||
|
||||
# Find orphaned relationships (groups that no longer exist in the source)
|
||||
current_group_names = set(groups.keys())
|
||||
for group_name, rel in all_existing_relationships.items():
|
||||
if group_name not in current_group_names:
|
||||
relations_to_delete.append(rel)
|
||||
logger.debug(f"Marking relationship for deletion: group '{group_name}' no longer exists in source for account {account.id}")
|
||||
|
||||
for group in all_group_objs:
|
||||
custom_props = groups.get(group.name, {})
|
||||
|
|
@ -596,9 +593,15 @@ def process_groups(account, groups):
|
|||
del updated_custom_props["xc_id"]
|
||||
|
||||
existing_rel.custom_properties = updated_custom_props
|
||||
existing_rel.last_seen = scan_start_time
|
||||
existing_rel.is_stale = False
|
||||
relations_to_update.append(existing_rel)
|
||||
logger.debug(f"Updated xc_id for group '{group.name}' from '{existing_xc_id}' to '{new_xc_id}' - account {account.id}")
|
||||
else:
|
||||
# Update last_seen even if xc_id hasn't changed
|
||||
existing_rel.last_seen = scan_start_time
|
||||
existing_rel.is_stale = False
|
||||
relations_to_update.append(existing_rel)
|
||||
logger.debug(f"xc_id unchanged for group '{group.name}' - account {account.id}")
|
||||
else:
|
||||
# Create new relationship - this group is new to this M3U account
|
||||
|
|
@ -612,6 +615,8 @@ def process_groups(account, groups):
|
|||
m3u_account=account,
|
||||
custom_properties=custom_props,
|
||||
enabled=auto_enable_new_groups_live,
|
||||
last_seen=scan_start_time,
|
||||
is_stale=False,
|
||||
)
|
||||
)
|
||||
|
||||
|
|
@ -622,15 +627,38 @@ def process_groups(account, groups):
|
|||
|
||||
# Bulk update existing relationships
|
||||
if relations_to_update:
|
||||
ChannelGroupM3UAccount.objects.bulk_update(relations_to_update, ['custom_properties'])
|
||||
logger.info(f"Updated {len(relations_to_update)} existing group relationships with new xc_id values for account {account.id}")
|
||||
ChannelGroupM3UAccount.objects.bulk_update(relations_to_update, ['custom_properties', 'last_seen', 'is_stale'])
|
||||
logger.info(f"Updated {len(relations_to_update)} existing group relationships for account {account.id}")
|
||||
|
||||
# Delete orphaned relationships
|
||||
if relations_to_delete:
|
||||
ChannelGroupM3UAccount.objects.filter(
|
||||
id__in=[rel.id for rel in relations_to_delete]
|
||||
).delete()
|
||||
logger.info(f"Deleted {len(relations_to_delete)} orphaned group relationships for account {account.id}: {[rel.channel_group.name for rel in relations_to_delete]}")
|
||||
|
||||
def cleanup_stale_group_relationships(account, scan_start_time):
|
||||
"""
|
||||
Remove group relationships that haven't been seen since the stale retention period.
|
||||
This follows the same logic as stream cleanup for consistency.
|
||||
"""
|
||||
# Calculate cutoff date for stale group relationships
|
||||
stale_cutoff = scan_start_time - timezone.timedelta(days=account.stale_stream_days)
|
||||
logger.info(
|
||||
f"Removing group relationships not seen since {stale_cutoff} for M3U account {account.id}"
|
||||
)
|
||||
|
||||
# Find stale relationships
|
||||
stale_relationships = ChannelGroupM3UAccount.objects.filter(
|
||||
m3u_account=account,
|
||||
last_seen__lt=stale_cutoff
|
||||
).select_related('channel_group')
|
||||
|
||||
relations_to_delete = list(stale_relationships)
|
||||
deleted_count = len(relations_to_delete)
|
||||
|
||||
if deleted_count > 0:
|
||||
logger.info(
|
||||
f"Found {deleted_count} stale group relationships for account {account.id}: "
|
||||
f"{[rel.channel_group.name for rel in relations_to_delete]}"
|
||||
)
|
||||
|
||||
# Delete the stale relationships
|
||||
stale_relationships.delete()
|
||||
|
||||
# Check if any of the deleted relationships left groups with no remaining associations
|
||||
orphaned_group_ids = []
|
||||
|
|
@ -655,6 +683,10 @@ def process_groups(account, groups):
|
|||
deleted_groups = list(ChannelGroup.objects.filter(id__in=orphaned_group_ids).values_list('name', flat=True))
|
||||
ChannelGroup.objects.filter(id__in=orphaned_group_ids).delete()
|
||||
logger.info(f"Deleted {len(orphaned_group_ids)} orphaned groups that had no remaining associations: {deleted_groups}")
|
||||
else:
|
||||
logger.debug(f"No stale group relationships found for account {account.id}")
|
||||
|
||||
return deleted_count
|
||||
|
||||
|
||||
def collect_xc_streams(account_id, enabled_groups):
|
||||
|
|
@ -791,7 +823,7 @@ def process_xc_category_direct(account_id, batch, groups, hash_keys):
|
|||
group_title = group_name
|
||||
|
||||
stream_hash = Stream.generate_hash_key(
|
||||
name, url, tvg_id, hash_keys, m3u_id=account_id
|
||||
name, url, tvg_id, hash_keys, m3u_id=account_id, group=group_title
|
||||
)
|
||||
stream_props = {
|
||||
"name": name,
|
||||
|
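Because group_title now feeds Stream.generate_hash_key (here and again in process_m3u_batch_direct below), a stream that moves to a different group in the source playlist can hash to a new identity instead of silently updating the old row, subject to the account's hash_keys configuration. For example:

# Same name/url/tvg_id, different group -> potentially a different stream hash:
hash_a = Stream.generate_hash_key(name, url, tvg_id, hash_keys, m3u_id=account_id, group="Sports")
hash_b = Stream.generate_hash_key(name, url, tvg_id, hash_keys, m3u_id=account_id, group="News")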
|
@ -802,6 +834,7 @@ def process_xc_category_direct(account_id, batch, groups, hash_keys):
|
|||
"channel_group_id": int(group_id),
|
||||
"stream_hash": stream_hash,
|
||||
"custom_properties": stream,
|
||||
"is_stale": False,
|
||||
}
|
||||
|
||||
if stream_hash not in stream_hashes:
|
||||
|
|
@ -837,10 +870,12 @@ def process_xc_category_direct(account_id, batch, groups, hash_keys):
|
|||
setattr(obj, key, value)
|
||||
obj.last_seen = timezone.now()
|
||||
obj.updated_at = timezone.now() # Update timestamp only for changed streams
|
||||
obj.is_stale = False
|
||||
streams_to_update.append(obj)
|
||||
else:
|
||||
# Always update last_seen, even if nothing else changed
|
||||
obj.last_seen = timezone.now()
|
||||
obj.is_stale = False
|
||||
# Don't update updated_at for unchanged streams
|
||||
streams_to_update.append(obj)
|
||||
|
||||
|
|
@ -851,6 +886,7 @@ def process_xc_category_direct(account_id, batch, groups, hash_keys):
|
|||
stream_props["updated_at"] = (
|
||||
timezone.now()
|
||||
) # Set initial updated_at for new streams
|
||||
stream_props["is_stale"] = False
|
||||
streams_to_create.append(Stream(**stream_props))
|
||||
|
||||
try:
|
||||
|
|
@ -862,7 +898,7 @@ def process_xc_category_direct(account_id, batch, groups, hash_keys):
|
|||
# Simplified bulk update for better performance
|
||||
Stream.objects.bulk_update(
|
||||
streams_to_update,
|
||||
['name', 'url', 'logo_url', 'tvg_id', 'custom_properties', 'last_seen', 'updated_at'],
|
||||
['name', 'url', 'logo_url', 'tvg_id', 'custom_properties', 'last_seen', 'updated_at', 'is_stale'],
|
||||
batch_size=150 # Smaller batch size for XC processing
|
||||
)
|
||||
|
||||
|
|
@ -965,7 +1001,7 @@ def process_m3u_batch_direct(account_id, batch, groups, hash_keys):
|
|||
)
|
||||
continue
|
||||
|
||||
stream_hash = Stream.generate_hash_key(name, url, tvg_id, hash_keys, m3u_id=account_id)
|
||||
stream_hash = Stream.generate_hash_key(name, url, tvg_id, hash_keys, m3u_id=account_id, group=group_title)
|
||||
stream_props = {
|
||||
"name": name,
|
||||
"url": url,
|
||||
|
|
@ -975,6 +1011,7 @@ def process_m3u_batch_direct(account_id, batch, groups, hash_keys):
|
|||
"channel_group_id": int(groups.get(group_title)),
|
||||
"stream_hash": stream_hash,
|
||||
"custom_properties": stream_info["attributes"],
|
||||
"is_stale": False,
|
||||
}
|
||||
|
||||
if stream_hash not in stream_hashes:
|
||||
|
|
@ -1014,11 +1051,15 @@ def process_m3u_batch_direct(account_id, batch, groups, hash_keys):
|
|||
obj.custom_properties = stream_props["custom_properties"]
|
||||
obj.updated_at = timezone.now()
|
||||
|
||||
# Always mark as not stale since we saw it in this refresh
|
||||
obj.is_stale = False
|
||||
|
||||
streams_to_update.append(obj)
|
||||
else:
|
||||
# New stream
|
||||
stream_props["last_seen"] = timezone.now()
|
||||
stream_props["updated_at"] = timezone.now()
|
||||
stream_props["is_stale"] = False
|
||||
streams_to_create.append(Stream(**stream_props))
|
||||
|
||||
try:
|
||||
|
|
@ -1030,7 +1071,7 @@ def process_m3u_batch_direct(account_id, batch, groups, hash_keys):
|
|||
# Update all streams in a single bulk operation
|
||||
Stream.objects.bulk_update(
|
||||
streams_to_update,
|
||||
['name', 'url', 'logo_url', 'tvg_id', 'custom_properties', 'last_seen', 'updated_at'],
|
||||
['name', 'url', 'logo_url', 'tvg_id', 'custom_properties', 'last_seen', 'updated_at', 'is_stale'],
|
||||
batch_size=200
|
||||
)
|
||||
except Exception as e:
|
||||
|
|
@ -1091,7 +1132,15 @@ def cleanup_streams(account_id, scan_start_time=timezone.now):
|
|||
|
||||
|
||||
@shared_task
|
||||
def refresh_m3u_groups(account_id, use_cache=False, full_refresh=False):
|
||||
def refresh_m3u_groups(account_id, use_cache=False, full_refresh=False, scan_start_time=None):
|
||||
"""Refresh M3U groups for an account.
|
||||
|
||||
Args:
|
||||
account_id: ID of the M3U account
|
||||
use_cache: Whether to use cached M3U file
|
||||
full_refresh: Whether this is part of a full refresh
|
||||
scan_start_time: Timestamp when the scan started (for consistent last_seen marking)
|
||||
"""
|
||||
if not acquire_task_lock("refresh_m3u_account_groups", account_id):
|
||||
return f"Task already running for account_id={account_id}.", None
|
||||
|
||||
|
|
@ -1418,7 +1467,7 @@ def refresh_m3u_groups(account_id, use_cache=False, full_refresh=False):
|
|||
|
||||
send_m3u_update(account_id, "processing_groups", 0)
|
||||
|
||||
process_groups(account, groups)
|
||||
process_groups(account, groups, scan_start_time)
|
||||
|
||||
release_task_lock("refresh_m3u_account_groups", account_id)
|
||||
|
||||
|
|
@ -2525,7 +2574,7 @@ def refresh_single_m3u_account(account_id):
|
|||
if not extinf_data:
|
||||
try:
|
||||
logger.info(f"Calling refresh_m3u_groups for account {account_id}")
|
||||
result = refresh_m3u_groups(account_id, full_refresh=True)
|
||||
result = refresh_m3u_groups(account_id, full_refresh=True, scan_start_time=refresh_start_timestamp)
|
||||
logger.trace(f"refresh_m3u_groups result: {result}")
|
||||
|
||||
# Check for completely empty result or missing groups
|
||||
|
|
@ -2805,9 +2854,26 @@ def refresh_single_m3u_account(account_id):
|
|||
id=-1
|
||||
).exists() # This will never find anything but ensures DB sync
|
||||
|
||||
# Mark streams that weren't seen in this refresh as stale (pending deletion)
|
||||
stale_stream_count = Stream.objects.filter(
|
||||
m3u_account=account,
|
||||
last_seen__lt=refresh_start_timestamp
|
||||
).update(is_stale=True)
|
||||
logger.info(f"Marked {stale_stream_count} streams as stale for account {account_id}")
|
||||
|
||||
# Mark group relationships that weren't seen in this refresh as stale (pending deletion)
|
||||
stale_group_count = ChannelGroupM3UAccount.objects.filter(
|
||||
m3u_account=account,
|
||||
last_seen__lt=refresh_start_timestamp
|
||||
).update(is_stale=True)
|
||||
logger.info(f"Marked {stale_group_count} group relationships as stale for account {account_id}")
|
||||
|
||||
# Now run cleanup
|
||||
streams_deleted = cleanup_streams(account_id, refresh_start_timestamp)
|
||||
|
||||
# Cleanup stale group relationships (follows same retention policy as streams)
|
||||
cleanup_stale_group_relationships(account, refresh_start_timestamp)
|
||||
|
||||
# Run auto channel sync after successful refresh
|
||||
auto_sync_message = ""
|
||||
try:
|
||||
|
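Group relationships now follow the same lifecycle as streams: everything seen during a refresh gets last_seen=scan_start_time and is_stale=False in process_groups, anything untouched is flagged stale after the refresh, and cleanup deletes only entries whose last_seen predates the account's retention window. The whole flow, compressed into a sketch using the model and field names from this diff:

def stale_group_lifecycle(account, scan_start_time):
    # 1. During processing, touched relations are refreshed (done in process_groups):
    #    rel.last_seen = scan_start_time; rel.is_stale = False
    # 2. After the refresh, anything not seen in this scan is flagged as pending deletion.
    ChannelGroupM3UAccount.objects.filter(
        m3u_account=account,
        last_seen__lt=scan_start_time,
    ).update(is_stale=True)
    # 3. Cleanup removes only relations that stayed unseen past the retention window.
    cutoff = scan_start_time - timezone.timedelta(days=account.stale_stream_days)
    ChannelGroupM3UAccount.objects.filter(
        m3u_account=account,
        last_seen__lt=cutoff,
    ).delete()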
|
@ -2840,6 +2906,17 @@ def refresh_single_m3u_account(account_id):
|
|||
account.updated_at = timezone.now()
|
||||
account.save(update_fields=["status", "last_message", "updated_at"])
|
||||
|
||||
# Log system event for M3U refresh
|
||||
log_system_event(
|
||||
event_type='m3u_refresh',
|
||||
account_name=account.name,
|
||||
elapsed_time=round(elapsed_time, 2),
|
||||
streams_created=streams_created,
|
||||
streams_updated=streams_updated,
|
||||
streams_deleted=streams_deleted,
|
||||
total_processed=streams_processed,
|
||||
)
|
||||
|
||||
# Send final update with complete metrics and explicitly include success status
|
||||
send_m3u_update(
|
||||
account_id,
|
||||
|
|
@ -7,7 +7,6 @@ from django.views.decorators.csrf import csrf_exempt
|
|||
from django.views.decorators.http import require_http_methods
|
||||
from apps.epg.models import ProgramData
|
||||
from apps.accounts.models import User
|
||||
from core.models import CoreSettings, NETWORK_ACCESS
|
||||
from dispatcharr.utils import network_access_allowed
|
||||
from django.utils import timezone as django_timezone
|
||||
from django.shortcuts import get_object_or_404
|
||||
|
|
@ -23,23 +22,86 @@ from django.db.models.functions import Lower
|
|||
import os
|
||||
from apps.m3u.utils import calculate_tuner_count
|
||||
import regex
|
||||
from core.utils import log_system_event
|
||||
import hashlib
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
def get_client_identifier(request):
|
||||
"""Get client information including IP, user agent, and a unique hash identifier
|
||||
|
||||
Returns:
|
||||
tuple: (client_id_hash, client_ip, user_agent)
|
||||
"""
|
||||
# Get client IP (handle proxies)
|
||||
x_forwarded_for = request.META.get('HTTP_X_FORWARDED_FOR')
|
||||
if x_forwarded_for:
|
||||
client_ip = x_forwarded_for.split(',')[0].strip()
|
||||
else:
|
||||
client_ip = request.META.get('REMOTE_ADDR', 'unknown')
|
||||
|
||||
# Get user agent
|
||||
user_agent = request.META.get('HTTP_USER_AGENT', 'unknown')
|
||||
|
||||
# Create a hash for a shorter cache key
|
||||
client_str = f"{client_ip}:{user_agent}"
|
||||
client_id_hash = hashlib.md5(client_str.encode()).hexdigest()[:12]
|
||||
|
||||
return client_id_hash, client_ip, user_agent
|
||||
|
||||
def m3u_endpoint(request, profile_name=None, user=None):
|
||||
logger.debug("m3u_endpoint called: method=%s, profile=%s", request.method, profile_name)
|
||||
if not network_access_allowed(request, "M3U_EPG"):
|
||||
# Log blocked M3U download
|
||||
from core.utils import log_system_event
|
||||
client_ip = request.META.get('REMOTE_ADDR', 'unknown')
|
||||
user_agent = request.META.get('HTTP_USER_AGENT', 'unknown')
|
||||
log_system_event(
|
||||
event_type='m3u_blocked',
|
||||
profile=profile_name or 'all',
|
||||
reason='Network access denied',
|
||||
client_ip=client_ip,
|
||||
user_agent=user_agent,
|
||||
)
|
||||
return JsonResponse({"error": "Forbidden"}, status=403)
|
||||
|
||||
# Handle HEAD requests efficiently without generating content
|
||||
if request.method == "HEAD":
|
||||
logger.debug("Handling HEAD request for M3U")
|
||||
response = HttpResponse(content_type="audio/x-mpegurl")
|
||||
response["Content-Disposition"] = 'attachment; filename="channels.m3u"'
|
||||
return response
|
||||
|
||||
return generate_m3u(request, profile_name, user)
|
||||
|
||||
def epg_endpoint(request, profile_name=None, user=None):
|
||||
logger.debug("epg_endpoint called: method=%s, profile=%s", request.method, profile_name)
|
||||
if not network_access_allowed(request, "M3U_EPG"):
|
||||
# Log blocked EPG download
|
||||
from core.utils import log_system_event
|
||||
client_ip = request.META.get('REMOTE_ADDR', 'unknown')
|
||||
user_agent = request.META.get('HTTP_USER_AGENT', 'unknown')
|
||||
log_system_event(
|
||||
event_type='epg_blocked',
|
||||
profile=profile_name or 'all',
|
||||
reason='Network access denied',
|
||||
client_ip=client_ip,
|
||||
user_agent=user_agent,
|
||||
)
|
||||
return JsonResponse({"error": "Forbidden"}, status=403)
|
||||
|
||||
# Handle HEAD requests efficiently without generating content
|
||||
if request.method == "HEAD":
|
||||
logger.debug("Handling HEAD request for EPG")
|
||||
response = HttpResponse(content_type="application/xml")
|
||||
response["Content-Disposition"] = 'attachment; filename="Dispatcharr.xml"'
|
||||
response["Cache-Control"] = "no-cache"
|
||||
return response
|
||||
|
||||
return generate_epg(request, profile_name, user)
|
||||
|
||||
@csrf_exempt
|
||||
@require_http_methods(["GET", "POST"])
|
||||
@require_http_methods(["GET", "POST", "HEAD"])
|
||||
def generate_m3u(request, profile_name=None, user=None):
|
||||
"""
|
||||
Dynamically generate an M3U file from channels.
|
||||
|
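get_client_identifier gives the download endpoints a compact, stable handle on "the same client asking again", which generate_m3u later in this file combines with a short-lived cache entry to suppress duplicate m3u_download events. A sketch of that pairing:

from django.core.cache import cache

def log_download_once(request, event_type, ttl_seconds=2, **metadata):
    """Log a download event at most once per client within a short window."""
    client_id, client_ip, user_agent = get_client_identifier(request)
    dedupe_key = f"{event_type}:{client_id}"
    if not cache.get(dedupe_key):
        log_system_event(event_type, client_ip=client_ip, user_agent=user_agent, **metadata)
        cache.set(dedupe_key, True, ttl_seconds)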
|
@ -47,7 +109,19 @@ def generate_m3u(request, profile_name=None, user=None):
|
|||
Supports both GET and POST methods for compatibility with IPTVSmarters.
|
||||
"""
|
||||
# Check if this is a POST request and the body is not empty (which we don't want to allow)
|
||||
logger.debug("Generating M3U for profile: %s, user: %s", profile_name, user.username if user else "Anonymous")
|
||||
logger.debug("Generating M3U for profile: %s, user: %s, method: %s", profile_name, user.username if user else "Anonymous", request.method)
|
||||
|
||||
# Check cache for recent identical request (helps with double-GET from browsers)
|
||||
from django.core.cache import cache
|
||||
cache_params = f"{profile_name or 'all'}:{user.username if user else 'anonymous'}:{request.GET.urlencode()}"
|
||||
content_cache_key = f"m3u_content:{cache_params}"
|
||||
|
||||
cached_content = cache.get(content_cache_key)
|
||||
if cached_content:
|
||||
logger.debug("Serving M3U from cache")
|
||||
response = HttpResponse(cached_content, content_type="audio/x-mpegurl")
|
||||
response["Content-Disposition"] = 'attachment; filename="channels.m3u"'
|
||||
return response
|
||||
# Check if this is a POST request with data (which we don't want to allow)
|
||||
if request.method == "POST" and request.body:
|
||||
if request.body.decode() != '{}':
|
||||
|
|
@ -76,20 +150,17 @@ def generate_m3u(request, profile_name=None, user=None):
|
|||
|
||||
else:
|
||||
if profile_name is not None:
|
||||
channel_profile = ChannelProfile.objects.get(name=profile_name)
|
||||
try:
|
||||
channel_profile = ChannelProfile.objects.get(name=profile_name)
|
||||
except ChannelProfile.DoesNotExist:
|
||||
logger.warning("Requested channel profile (%s) during m3u generation does not exist", profile_name)
|
||||
raise Http404(f"Channel profile '{profile_name}' not found")
|
||||
channels = Channel.objects.filter(
|
||||
channelprofilemembership__channel_profile=channel_profile,
|
||||
channelprofilemembership__enabled=True
|
||||
).order_by('channel_number')
|
||||
else:
|
||||
if profile_name is not None:
|
||||
channel_profile = ChannelProfile.objects.get(name=profile_name)
|
||||
channels = Channel.objects.filter(
|
||||
channelprofilemembership__channel_profile=channel_profile,
|
||||
channelprofilemembership__enabled=True,
|
||||
).order_by("channel_number")
|
||||
else:
|
||||
channels = Channel.objects.order_by("channel_number")
|
||||
channels = Channel.objects.order_by("channel_number")
|
||||
|
||||
# Check if the request wants to use direct logo URLs instead of cache
|
||||
use_cached_logos = request.GET.get('cachedlogos', 'true').lower() != 'false'
|
||||
|
|
@ -102,16 +173,26 @@ def generate_m3u(request, profile_name=None, user=None):
|
|||
tvg_id_source = request.GET.get('tvg_id_source', 'channel_number').lower()
|
||||
|
||||
# Build EPG URL with query parameters if needed
|
||||
epg_base_url = build_absolute_uri_with_port(request, reverse('output:epg_endpoint', args=[profile_name]) if profile_name else reverse('output:epg_endpoint'))
|
||||
# Check if this is an XC API request (has username/password in GET params and user is authenticated)
|
||||
xc_username = request.GET.get('username')
|
||||
xc_password = request.GET.get('password')
|
||||
|
||||
# Optionally preserve certain query parameters
|
||||
preserved_params = ['tvg_id_source', 'cachedlogos', 'days']
|
||||
query_params = {k: v for k, v in request.GET.items() if k in preserved_params}
|
||||
if query_params:
|
||||
from urllib.parse import urlencode
|
||||
epg_url = f"{epg_base_url}?{urlencode(query_params)}"
|
||||
if user is not None and xc_username and xc_password:
|
||||
# This is an XC API request - use XC-style EPG URL
|
||||
base_url = build_absolute_uri_with_port(request, '')
|
||||
epg_url = f"{base_url}/xmltv.php?username={xc_username}&password={xc_password}"
|
||||
else:
|
||||
epg_url = epg_base_url
|
||||
# Regular request - use standard EPG endpoint
|
||||
epg_base_url = build_absolute_uri_with_port(request, reverse('output:epg_endpoint', args=[profile_name]) if profile_name else reverse('output:epg_endpoint'))
|
||||
|
||||
# Optionally preserve certain query parameters
|
||||
preserved_params = ['tvg_id_source', 'cachedlogos', 'days']
|
||||
query_params = {k: v for k, v in request.GET.items() if k in preserved_params}
|
||||
if query_params:
|
||||
from urllib.parse import urlencode
|
||||
epg_url = f"{epg_base_url}?{urlencode(query_params)}"
|
||||
else:
|
||||
epg_url = epg_base_url
|
||||
|
||||
# Add x-tvg-url and url-tvg attribute for EPG URL
|
||||
m3u_content = f'#EXTM3U x-tvg-url="{epg_url}" url-tvg="{epg_url}"\n'
|
||||
|
|
@ -175,15 +256,30 @@ def generate_m3u(request, profile_name=None, user=None):
|
|||
stream_url = first_stream.url
|
||||
else:
|
||||
# Fall back to proxy URL if no direct URL available
|
||||
base_url = request.build_absolute_uri('/')[:-1]
|
||||
stream_url = f"{base_url}/proxy/ts/stream/{channel.uuid}"
|
||||
stream_url = build_absolute_uri_with_port(request, f"/proxy/ts/stream/{channel.uuid}")
|
||||
else:
|
||||
# Standard behavior - use proxy URL
|
||||
base_url = request.build_absolute_uri('/')[:-1]
|
||||
stream_url = f"{base_url}/proxy/ts/stream/{channel.uuid}"
|
||||
stream_url = build_absolute_uri_with_port(request, f"/proxy/ts/stream/{channel.uuid}")
|
||||
|
||||
m3u_content += extinf_line + stream_url + "\n"
|
||||
|
||||
# Cache the generated content for 2 seconds to handle double-GET requests
|
||||
cache.set(content_cache_key, m3u_content, 2)
|
||||
|
||||
# Log system event for M3U download (with deduplication based on client)
|
||||
client_id, client_ip, user_agent = get_client_identifier(request)
|
||||
event_cache_key = f"m3u_download:{user.username if user else 'anonymous'}:{profile_name or 'all'}:{client_id}"
|
||||
if not cache.get(event_cache_key):
|
||||
log_system_event(
|
||||
event_type='m3u_download',
|
||||
profile=profile_name or 'all',
|
||||
user=user.username if user else 'anonymous',
|
||||
channels=channels.count(),
|
||||
client_ip=client_ip,
|
||||
user_agent=user_agent,
|
||||
)
|
||||
cache.set(event_cache_key, True, 2) # Prevent duplicate events for 2 seconds
|
||||
|
||||
response = HttpResponse(m3u_content, content_type="audio/x-mpegurl")
|
||||
response["Content-Disposition"] = 'attachment; filename="channels.m3u"'
|
||||
return response
|
||||
|
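The two-second content cache above exists purely to absorb the double-GET that many players and browsers issue for playlist URLs; it is keyed on profile, user, and the raw query string so different parameter combinations never share output. The pattern in isolation:

from django.core.cache import cache

def cached_playlist(request, profile_name, user, build_content, ttl_seconds=2):
    key = f"m3u_content:{profile_name or 'all'}:{user.username if user else 'anonymous'}:{request.GET.urlencode()}"
    content = cache.get(key)
    if content is None:
        content = build_content()             # expensive M3U generation
        cache.set(key, content, ttl_seconds)  # just long enough to catch the duplicate request
    return content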
|
@ -564,28 +660,39 @@ def generate_custom_dummy_programs(channel_id, channel_name, now, num_days, cust
|
|||
try:
|
||||
# Support various date group names: month, day, year
|
||||
month_str = date_groups.get('month', '')
|
||||
day = int(date_groups.get('day', 1))
|
||||
year = int(date_groups.get('year', now.year)) # Default to current year if not provided
|
||||
day_str = date_groups.get('day', '')
|
||||
year_str = date_groups.get('year', '')
|
||||
|
||||
# Parse day - default to current day if empty or invalid
|
||||
day = int(day_str) if day_str else now.day
|
||||
|
||||
# Parse year - default to current year if empty or invalid (matches frontend behavior)
|
||||
year = int(year_str) if year_str else now.year
|
||||
|
||||
# Parse month - can be numeric (1-12) or text (Jan, January, etc.)
|
||||
month = None
|
||||
if month_str.isdigit():
|
||||
month = int(month_str)
|
||||
else:
|
||||
# Try to parse text month names
|
||||
import calendar
|
||||
month_str_lower = month_str.lower()
|
||||
# Check full month names
|
||||
for i, month_name in enumerate(calendar.month_name):
|
||||
if month_name.lower() == month_str_lower:
|
||||
month = i
|
||||
break
|
||||
# Check abbreviated month names if not found
|
||||
if month is None:
|
||||
for i, month_abbr in enumerate(calendar.month_abbr):
|
||||
if month_abbr.lower() == month_str_lower:
|
||||
if month_str:
|
||||
if month_str.isdigit():
|
||||
month = int(month_str)
|
||||
else:
|
||||
# Try to parse text month names
|
||||
import calendar
|
||||
month_str_lower = month_str.lower()
|
||||
# Check full month names
|
||||
for i, month_name in enumerate(calendar.month_name):
|
||||
if month_name.lower() == month_str_lower:
|
||||
month = i
|
||||
break
|
||||
# Check abbreviated month names if not found
|
||||
if month is None:
|
||||
for i, month_abbr in enumerate(calendar.month_abbr):
|
||||
if month_abbr.lower() == month_str_lower:
|
||||
month = i
|
||||
break
|
||||
|
||||
# Default to current month if not extracted or invalid
|
||||
if month is None:
|
||||
month = now.month
|
||||
|
||||
if month and 1 <= month <= 12 and 1 <= day <= 31:
|
||||
date_info = {'year': year, 'month': month, 'day': day}
|
||||
|
|
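The month-name matching above leans on the standard-library `calendar` module, where `calendar.month_name` and `calendar.month_abbr` are index-aligned with month numbers (index 0 is an empty string), which is why `month = i` already yields the 1-based month. A compact, self-contained version of that lookup, shown only as an illustration of the stdlib behaviour rather than the exact Dispatcharr code:

```python
import calendar


def parse_month(month_str, default):
    """Return a 1-12 month number from '7', 'Jul', or 'July'; fall back to `default`."""
    if not month_str:
        return default
    if month_str.isdigit():
        num = int(month_str)
        return num if 1 <= num <= 12 else default
    lowered = month_str.lower()
    for i, name in enumerate(calendar.month_name):   # '', 'January', 'February', ...
        if name.lower() == lowered:
            return i
    for i, abbr in enumerate(calendar.month_abbr):   # '', 'Jan', 'Feb', ...
        if abbr.lower() == lowered:
            return i
    return default


assert parse_month("July", 1) == 7
assert parse_month("jul", 1) == 7
assert parse_month("", 3) == 3
```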
@@ -1126,8 +1233,22 @@ def generate_epg(request, profile_name=None, user=None):
    by their associated EPGData record.
    This version filters data based on the 'days' parameter and sends keep-alives during processing.
    """
    # Check cache for recent identical request (helps with double-GET from browsers)
    from django.core.cache import cache
    cache_params = f"{profile_name or 'all'}:{user.username if user else 'anonymous'}:{request.GET.urlencode()}"
    content_cache_key = f"epg_content:{cache_params}"

    cached_content = cache.get(content_cache_key)
    if cached_content:
        logger.debug("Serving EPG from cache")
        response = HttpResponse(cached_content, content_type="application/xml")
        response["Content-Disposition"] = 'attachment; filename="Dispatcharr.xml"'
        response["Cache-Control"] = "no-cache"
        return response

    def epg_generator():
        """Generator function that yields EPG data with keep-alives during processing""" # Send initial HTTP headers as comments (these will be ignored by XML parsers but keep connection alive)
        """Generator function that yields EPG data with keep-alives during processing"""
        # Send initial HTTP headers as comments (these will be ignored by XML parsers but keep connection alive)

        xml_lines = []
        xml_lines.append('<?xml version="1.0" encoding="UTF-8"?>')
@@ -1158,7 +1279,11 @@ def generate_epg(request, profile_name=None, user=None):
            )
        else:
            if profile_name is not None:
                channel_profile = ChannelProfile.objects.get(name=profile_name)
                try:
                    channel_profile = ChannelProfile.objects.get(name=profile_name)
                except ChannelProfile.DoesNotExist:
                    logger.warning("Requested channel profile (%s) during epg generation does not exist", profile_name)
                    raise Http404(f"Channel profile '{profile_name}' not found")
                channels = Channel.objects.filter(
                    channelprofilemembership__channel_profile=channel_profile,
                    channelprofilemembership__enabled=True,
@ -1190,16 +1315,45 @@ def generate_epg(request, profile_name=None, user=None):
|
|||
now = django_timezone.now()
|
||||
cutoff_date = now + timedelta(days=num_days) if num_days > 0 else None
|
||||
|
||||
# Build collision-free channel number mapping for XC clients (if user is authenticated)
|
||||
# XC clients require integer channel numbers, so we need to ensure no conflicts
|
||||
channel_num_map = {}
|
||||
if user is not None:
|
||||
# This is an XC client - build collision-free mapping
|
||||
used_numbers = set()
|
||||
|
||||
# First pass: assign integers for channels that already have integer numbers
|
||||
for channel in channels:
|
||||
if channel.channel_number == int(channel.channel_number):
|
||||
num = int(channel.channel_number)
|
||||
channel_num_map[channel.id] = num
|
||||
used_numbers.add(num)
|
||||
|
||||
# Second pass: assign integers for channels with float numbers
|
||||
for channel in channels:
|
||||
if channel.channel_number != int(channel.channel_number):
|
||||
candidate = int(channel.channel_number)
|
||||
while candidate in used_numbers:
|
||||
candidate += 1
|
||||
channel_num_map[channel.id] = candidate
|
||||
used_numbers.add(candidate)
|
||||
|
||||
# Process channels for the <channel> section
|
||||
for channel in channels:
|
||||
# Format channel number as integer if it has no decimal component - same as M3U generation
|
||||
if channel.channel_number is not None:
|
||||
if channel.channel_number == int(channel.channel_number):
|
||||
formatted_channel_number = int(channel.channel_number)
|
||||
else:
|
||||
formatted_channel_number = channel.channel_number
|
||||
# For XC clients (user is not None), use collision-free integer mapping
|
||||
# For regular clients (user is None), use original formatting logic
|
||||
if user is not None:
|
||||
# XC client - use collision-free integer
|
||||
formatted_channel_number = channel_num_map[channel.id]
|
||||
else:
|
||||
formatted_channel_number = ""
|
||||
# Regular client - format channel number as integer if it has no decimal component
|
||||
if channel.channel_number is not None:
|
||||
if channel.channel_number == int(channel.channel_number):
|
||||
formatted_channel_number = int(channel.channel_number)
|
||||
else:
|
||||
formatted_channel_number = channel.channel_number
|
||||
else:
|
||||
formatted_channel_number = ""
|
||||
|
||||
# Determine the channel ID based on the selected source
|
||||
if tvg_id_source == 'tvg_id' and channel.tvg_id:
|
||||
|
|
@@ -1286,7 +1440,8 @@ def generate_epg(request, profile_name=None, user=None):
            xml_lines.append(" </channel>")

        # Send all channel definitions
        yield '\n'.join(xml_lines) + '\n'
        channel_xml = '\n'.join(xml_lines) + '\n'
        yield channel_xml
        xml_lines = []  # Clear to save memory

        # Process programs for each channel
@ -1298,14 +1453,20 @@ def generate_epg(request, profile_name=None, user=None):
|
|||
elif tvg_id_source == 'gracenote' and channel.tvc_guide_stationid:
|
||||
channel_id = channel.tvc_guide_stationid
|
||||
else:
|
||||
# Get formatted channel number
|
||||
if channel.channel_number is not None:
|
||||
if channel.channel_number == int(channel.channel_number):
|
||||
formatted_channel_number = int(channel.channel_number)
|
||||
else:
|
||||
formatted_channel_number = channel.channel_number
|
||||
# For XC clients (user is not None), use collision-free integer mapping
|
||||
# For regular clients (user is None), use original formatting logic
|
||||
if user is not None:
|
||||
# XC client - use collision-free integer from map
|
||||
formatted_channel_number = channel_num_map[channel.id]
|
||||
else:
|
||||
formatted_channel_number = ""
|
||||
# Regular client - format channel number as before
|
||||
if channel.channel_number is not None:
|
||||
if channel.channel_number == int(channel.channel_number):
|
||||
formatted_channel_number = int(channel.channel_number)
|
||||
else:
|
||||
formatted_channel_number = channel.channel_number
|
||||
else:
|
||||
formatted_channel_number = ""
|
||||
# Default to channel number
|
||||
channel_id = str(formatted_channel_number) if formatted_channel_number != "" else str(channel.id)
|
||||
|
||||
|
|
@ -1676,7 +1837,8 @@ def generate_epg(request, profile_name=None, user=None):
|
|||
|
||||
# Send batch when full or send keep-alive
|
||||
if len(program_batch) >= batch_size:
|
||||
yield '\n'.join(program_batch) + '\n'
|
||||
batch_xml = '\n'.join(program_batch) + '\n'
|
||||
yield batch_xml
|
||||
program_batch = []
|
||||
|
||||
# Move to next chunk
|
||||
|
|
@@ -1684,12 +1846,40 @@ def generate_epg(request, profile_name=None, user=None):

        # Send remaining programs in batch
        if program_batch:
            yield '\n'.join(program_batch) + '\n'
            batch_xml = '\n'.join(program_batch) + '\n'
            yield batch_xml

        # Send final closing tag and completion message
        yield "</tv>\n" # Return streaming response
        yield "</tv>\n"

        # Log system event for EPG download after streaming completes (with deduplication based on client)
        client_id, client_ip, user_agent = get_client_identifier(request)
        event_cache_key = f"epg_download:{user.username if user else 'anonymous'}:{profile_name or 'all'}:{client_id}"
        if not cache.get(event_cache_key):
            log_system_event(
                event_type='epg_download',
                profile=profile_name or 'all',
                user=user.username if user else 'anonymous',
                channels=channels.count(),
                client_ip=client_ip,
                user_agent=user_agent,
            )
            cache.set(event_cache_key, True, 2)  # Prevent duplicate events for 2 seconds

    # Wrapper generator that collects content for caching
    def caching_generator():
        collected_content = []
        for chunk in epg_generator():
            collected_content.append(chunk)
            yield chunk
        # After streaming completes, cache the full content
        full_content = ''.join(collected_content)
        cache.set(content_cache_key, full_content, 300)
        logger.debug("Cached EPG content (%d bytes)", len(full_content))

    # Return streaming response
    response = StreamingHttpResponse(
        streaming_content=epg_generator(),
        streaming_content=caching_generator(),
        content_type="application/xml"
    )
    response["Content-Disposition"] = 'attachment; filename="Dispatcharr.xml"'
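One subtlety of the `caching_generator` wrapper above: `StreamingHttpResponse` pulls from the generator lazily, so `cache.set` only runs after the last chunk has been sent, and an aborted download never populates the cache. A tiny standalone sketch of the same wrapping pattern (toy names, not Dispatcharr code) makes that ordering explicit:

```python
def chunks():
    yield "<tv>\n"
    yield "</tv>\n"


def caching(gen, store):
    collected = []
    for chunk in gen:
        collected.append(chunk)
        yield chunk
    # Runs only once the consumer has drained the generator completely.
    store["epg"] = "".join(collected)


store = {}
wrapped = caching(chunks(), store)
assert "epg" not in store                       # nothing cached yet
assert list(wrapped) == ["<tv>\n", "</tv>\n"]   # consuming the wrapper streams the chunks
assert store["epg"] == "<tv>\n</tv>\n"          # cached only after full consumption
```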
@@ -1777,45 +1967,31 @@ def xc_player_api(request, full=False):
    if user is None:
        return JsonResponse({'error': 'Unauthorized'}, status=401)

    server_info = xc_get_info(request)

    if not action:
        return JsonResponse(server_info)

    if action == "get_live_categories":
        return JsonResponse(xc_get_live_categories(user), safe=False)
    if action == "get_live_streams":
    elif action == "get_live_streams":
        return JsonResponse(xc_get_live_streams(request, user, request.GET.get("category_id")), safe=False)
    if action == "get_short_epg":
    elif action == "get_short_epg":
        return JsonResponse(xc_get_epg(request, user, short=True), safe=False)
    if action == "get_simple_data_table":
    elif action == "get_simple_data_table":
        return JsonResponse(xc_get_epg(request, user, short=False), safe=False)

    # Endpoints not implemented, but still provide a response
    if action in [
        "get_vod_categories",
        "get_vod_streams",
        "get_series",
        "get_series_categories",
        "get_series_info",
        "get_vod_info",
    ]:
        if action == "get_vod_categories":
            return JsonResponse(xc_get_vod_categories(user), safe=False)
        elif action == "get_vod_streams":
            return JsonResponse(xc_get_vod_streams(request, user, request.GET.get("category_id")), safe=False)
        elif action == "get_series_categories":
            return JsonResponse(xc_get_series_categories(user), safe=False)
        elif action == "get_series":
            return JsonResponse(xc_get_series(request, user, request.GET.get("category_id")), safe=False)
        elif action == "get_series_info":
            return JsonResponse(xc_get_series_info(request, user, request.GET.get("series_id")), safe=False)
        elif action == "get_vod_info":
            return JsonResponse(xc_get_vod_info(request, user, request.GET.get("vod_id")), safe=False)
        else:
            return JsonResponse([], safe=False)

    raise Http404()
    elif action == "get_vod_categories":
        return JsonResponse(xc_get_vod_categories(user), safe=False)
    elif action == "get_vod_streams":
        return JsonResponse(xc_get_vod_streams(request, user, request.GET.get("category_id")), safe=False)
    elif action == "get_series_categories":
        return JsonResponse(xc_get_series_categories(user), safe=False)
    elif action == "get_series":
        return JsonResponse(xc_get_series(request, user, request.GET.get("category_id")), safe=False)
    elif action == "get_series_info":
        return JsonResponse(xc_get_series_info(request, user, request.GET.get("series_id")), safe=False)
    elif action == "get_vod_info":
        return JsonResponse(xc_get_vod_info(request, user, request.GET.get("vod_id")), safe=False)
    else:
        # For any other action (including get_account_info or unknown actions),
        # return server_info/account_info to match provider behavior
        server_info = xc_get_info(request)
        return JsonResponse(server_info, safe=False)


def xc_panel_api(request):
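The action handling above is a straightforward if/elif chain. An equivalent structure some codebases prefer is a dispatch table keyed by action name; a hedged sketch of that alternative (not how Dispatcharr implements it), with stand-in handler bodies:

```python
from django.http import JsonResponse


def xc_dispatch(request, user, action, handlers, default):
    """Route an Xtream-Codes style `action` to a handler, falling back to `default`."""
    handler = handlers.get(action, default)
    return JsonResponse(handler(request, user), safe=False)


# Example wiring; every handler takes (request, user) and returns JSON-serializable data.
handlers = {
    "get_live_categories": lambda request, user: [],   # stand-in body
    "get_live_streams": lambda request, user: [],      # stand-in body
}
```

The chain in the diff has one behavioural detail a table would need to preserve: unknown actions fall through to the server/account info, matching typical provider behaviour.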
@ -1832,12 +2008,34 @@ def xc_panel_api(request):
|
|||
|
||||
def xc_get(request):
|
||||
if not network_access_allowed(request, 'XC_API'):
|
||||
# Log blocked M3U download
|
||||
from core.utils import log_system_event
|
||||
client_ip = request.META.get('REMOTE_ADDR', 'unknown')
|
||||
user_agent = request.META.get('HTTP_USER_AGENT', 'unknown')
|
||||
log_system_event(
|
||||
event_type='m3u_blocked',
|
||||
user=request.GET.get('username', 'unknown'),
|
||||
reason='Network access denied (XC API)',
|
||||
client_ip=client_ip,
|
||||
user_agent=user_agent,
|
||||
)
|
||||
return JsonResponse({'error': 'Forbidden'}, status=403)
|
||||
|
||||
action = request.GET.get("action")
|
||||
user = xc_get_user(request)
|
||||
|
||||
if user is None:
|
||||
# Log blocked M3U download due to invalid credentials
|
||||
from core.utils import log_system_event
|
||||
client_ip = request.META.get('REMOTE_ADDR', 'unknown')
|
||||
user_agent = request.META.get('HTTP_USER_AGENT', 'unknown')
|
||||
log_system_event(
|
||||
event_type='m3u_blocked',
|
||||
user=request.GET.get('username', 'unknown'),
|
||||
reason='Invalid XC credentials',
|
||||
client_ip=client_ip,
|
||||
user_agent=user_agent,
|
||||
)
|
||||
return JsonResponse({'error': 'Unauthorized'}, status=401)
|
||||
|
||||
return generate_m3u(request, None, user)
|
||||
|
|
@ -1845,11 +2043,33 @@ def xc_get(request):
|
|||
|
||||
def xc_xmltv(request):
|
||||
if not network_access_allowed(request, 'XC_API'):
|
||||
# Log blocked EPG download
|
||||
from core.utils import log_system_event
|
||||
client_ip = request.META.get('REMOTE_ADDR', 'unknown')
|
||||
user_agent = request.META.get('HTTP_USER_AGENT', 'unknown')
|
||||
log_system_event(
|
||||
event_type='epg_blocked',
|
||||
user=request.GET.get('username', 'unknown'),
|
||||
reason='Network access denied (XC API)',
|
||||
client_ip=client_ip,
|
||||
user_agent=user_agent,
|
||||
)
|
||||
return JsonResponse({'error': 'Forbidden'}, status=403)
|
||||
|
||||
user = xc_get_user(request)
|
||||
|
||||
if user is None:
|
||||
# Log blocked EPG download due to invalid credentials
|
||||
from core.utils import log_system_event
|
||||
client_ip = request.META.get('REMOTE_ADDR', 'unknown')
|
||||
user_agent = request.META.get('HTTP_USER_AGENT', 'unknown')
|
||||
log_system_event(
|
||||
event_type='epg_blocked',
|
||||
user=request.GET.get('username', 'unknown'),
|
||||
reason='Invalid XC credentials',
|
||||
client_ip=client_ip,
|
||||
user_agent=user_agent,
|
||||
)
|
||||
return JsonResponse({'error': 'Unauthorized'}, status=401)
|
||||
|
||||
return generate_epg(request, None, user)
|
||||
|
|
@ -1924,10 +2144,38 @@ def xc_get_live_streams(request, user, category_id=None):
|
|||
channel_group__id=category_id, user_level__lte=user.user_level
|
||||
).order_by("channel_number")
|
||||
|
||||
# Build collision-free mapping for XC clients (which require integers)
|
||||
# This ensures channels with float numbers don't conflict with existing integers
|
||||
channel_num_map = {} # Maps channel.id -> integer channel number for XC
|
||||
used_numbers = set() # Track all assigned integer channel numbers
|
||||
|
||||
# First pass: assign integers for channels that already have integer numbers
|
||||
for channel in channels:
|
||||
if channel.channel_number == int(channel.channel_number):
|
||||
# Already an integer, use it directly
|
||||
num = int(channel.channel_number)
|
||||
channel_num_map[channel.id] = num
|
||||
used_numbers.add(num)
|
||||
|
||||
# Second pass: assign integers for channels with float numbers
|
||||
# Find next available number to avoid collisions
|
||||
for channel in channels:
|
||||
if channel.channel_number != int(channel.channel_number):
|
||||
# Has decimal component, need to find available integer
|
||||
# Start from truncated value and increment until we find an unused number
|
||||
candidate = int(channel.channel_number)
|
||||
while candidate in used_numbers:
|
||||
candidate += 1
|
||||
channel_num_map[channel.id] = candidate
|
||||
used_numbers.add(candidate)
|
||||
|
||||
# Build the streams list with the collision-free channel numbers
|
||||
for channel in channels:
|
||||
channel_num_int = channel_num_map[channel.id]
|
||||
|
||||
streams.append(
|
||||
{
|
||||
"num": int(channel.channel_number) if channel.channel_number.is_integer() else channel.channel_number,
|
||||
"num": channel_num_int,
|
||||
"name": channel.name,
|
||||
"stream_type": "live",
|
||||
"stream_id": channel.id,
|
||||
|
|
@ -1939,7 +2187,7 @@ def xc_get_live_streams(request, user, category_id=None):
|
|||
reverse("api:channels:logo-cache", args=[channel.logo.id])
|
||||
)
|
||||
),
|
||||
"epg_channel_id": str(int(channel.channel_number)) if channel.channel_number.is_integer() else str(channel.channel_number),
|
||||
"epg_channel_id": str(channel_num_int),
|
||||
"added": int(channel.created_at.timestamp()),
|
||||
"is_adult": 0,
|
||||
"category_id": str(channel.channel_group.id),
|
||||
|
|
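The two-pass mapping above guarantees that XC clients, which only accept integer channel numbers, get unique integers: integer-numbered channels keep their number, and fractional ones are bumped to the next free integer at or above their floor. A standalone sketch of the same algorithm on plain numbers (keyed by number here rather than channel id, purely for illustration), with a worked example:

```python
def collision_free_numbers(channel_numbers):
    """Map each channel number to a unique integer, preserving existing integer numbers."""
    mapping = {}
    used = set()
    # First pass: numbers that are already integers keep their value.
    for num in channel_numbers:
        if num == int(num):
            mapping[num] = int(num)
            used.add(int(num))
    # Second pass: fractional numbers take the next free integer at or above their floor.
    for num in channel_numbers:
        if num != int(num):
            candidate = int(num)
            while candidate in used:
                candidate += 1
            mapping[num] = candidate
            used.add(candidate)
    return mapping


# 1 and 2 keep their numbers; 1.5 bumps past 1 and 2 to 3, and 2.5 then lands on 4.
assert collision_free_numbers([1.0, 2.0, 1.5, 2.5]) == {1.0: 1, 2.0: 2, 1.5: 3, 2.5: 4}
```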
@ -1988,7 +2236,36 @@ def xc_get_epg(request, user, short=False):
|
|||
if not channel:
|
||||
raise Http404()
|
||||
|
||||
limit = request.GET.get('limit', 4)
|
||||
# Calculate the collision-free integer channel number for this channel
|
||||
# This must match the logic in xc_get_live_streams to ensure consistency
|
||||
# Get all channels in the same category for collision detection
|
||||
category_channels = Channel.objects.filter(
|
||||
channel_group=channel.channel_group
|
||||
).order_by("channel_number")
|
||||
|
||||
channel_num_map = {}
|
||||
used_numbers = set()
|
||||
|
||||
# First pass: assign integers for channels that already have integer numbers
|
||||
for ch in category_channels:
|
||||
if ch.channel_number == int(ch.channel_number):
|
||||
num = int(ch.channel_number)
|
||||
channel_num_map[ch.id] = num
|
||||
used_numbers.add(num)
|
||||
|
||||
# Second pass: assign integers for channels with float numbers
|
||||
for ch in category_channels:
|
||||
if ch.channel_number != int(ch.channel_number):
|
||||
candidate = int(ch.channel_number)
|
||||
while candidate in used_numbers:
|
||||
candidate += 1
|
||||
channel_num_map[ch.id] = candidate
|
||||
used_numbers.add(candidate)
|
||||
|
||||
# Get the mapped integer for this specific channel
|
||||
channel_num_int = channel_num_map.get(channel.id, int(channel.channel_number))
|
||||
|
||||
limit = int(request.GET.get('limit', 4))
|
||||
if channel.epg_data:
|
||||
# Check if this is a dummy EPG that generates on-demand
|
||||
if channel.epg_data.epg_source and channel.epg_data.epg_source.source_type == 'dummy':
|
||||
|
|
@ -2020,32 +2297,43 @@ def xc_get_epg(request, user, short=False):
|
|||
programs = generate_dummy_programs(channel_id=channel_id, channel_name=channel.name, epg_source=None)
|
||||
|
||||
output = {"epg_listings": []}
|
||||
|
||||
for program in programs:
|
||||
id = "0"
|
||||
epg_id = "0"
|
||||
title = program['title'] if isinstance(program, dict) else program.title
|
||||
description = program['description'] if isinstance(program, dict) else program.description
|
||||
|
||||
start = program["start_time"] if isinstance(program, dict) else program.start_time
|
||||
end = program["end_time"] if isinstance(program, dict) else program.end_time
|
||||
|
||||
# For database programs, use actual ID; for generated dummy programs, create synthetic ID
|
||||
if isinstance(program, dict):
|
||||
# Generated dummy program - create unique ID from channel + timestamp
|
||||
program_id = str(abs(hash(f"{channel_id}_{int(start.timestamp())}")))
|
||||
else:
|
||||
# Database program - use actual ID
|
||||
program_id = str(program.id)
|
||||
|
||||
# epg_id refers to the EPG source/channel mapping in XC panels
|
||||
# Use the actual EPGData ID when available, otherwise fall back to 0
|
||||
epg_id = str(channel.epg_data.id) if channel.epg_data else "0"
|
||||
|
||||
program_output = {
|
||||
"id": f"{id}",
|
||||
"epg_id": f"{epg_id}",
|
||||
"title": base64.b64encode(title.encode()).decode(),
|
||||
"id": program_id,
|
||||
"epg_id": epg_id,
|
||||
"title": base64.b64encode((title or "").encode()).decode(),
|
||||
"lang": "",
|
||||
"start": start.strftime("%Y%m%d%H%M%S"),
|
||||
"end": end.strftime("%Y%m%d%H%M%S"),
|
||||
"description": base64.b64encode(description.encode()).decode(),
|
||||
"channel_id": int(channel.channel_number) if channel.channel_number.is_integer() else channel.channel_number,
|
||||
"start_timestamp": int(start.timestamp()),
|
||||
"stop_timestamp": int(end.timestamp()),
|
||||
"start": start.strftime("%Y-%m-%d %H:%M:%S"),
|
||||
"end": end.strftime("%Y-%m-%d %H:%M:%S"),
|
||||
"description": base64.b64encode((description or "").encode()).decode(),
|
||||
"channel_id": str(channel_num_int),
|
||||
"start_timestamp": str(int(start.timestamp())),
|
||||
"stop_timestamp": str(int(end.timestamp())),
|
||||
"stream_id": f"{channel_id}",
|
||||
}
|
||||
|
||||
if short == False:
|
||||
program_output["now_playing"] = 1 if start <= django_timezone.now() <= end else 0
|
||||
program_output["has_archive"] = "0"
|
||||
program_output["has_archive"] = 0
|
||||
|
||||
output['epg_listings'].append(program_output)
|
||||
|
||||
|
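The `program_output` dict above is the shape Xtream-Codes style clients expect: base64-encoded title and description, `YYYY-MM-DD HH:MM:SS` start/end strings, and string Unix timestamps. A small helper-style sketch that builds one listing entry with those exact fields (the helper itself is hypothetical, mirroring the fields shown above rather than reproducing the actual function):

```python
import base64
from datetime import datetime, timezone


def epg_listing(program_id, epg_id, title, description, start, end, channel_num, stream_id):
    """Build one XC `epg_listings` entry; `start`/`end` are timezone-aware datetimes."""
    return {
        "id": str(program_id),
        "epg_id": str(epg_id),
        "title": base64.b64encode((title or "").encode()).decode(),
        "lang": "",
        "start": start.strftime("%Y-%m-%d %H:%M:%S"),
        "end": end.strftime("%Y-%m-%d %H:%M:%S"),
        "description": base64.b64encode((description or "").encode()).decode(),
        "channel_id": str(channel_num),
        "start_timestamp": str(int(start.timestamp())),
        "stop_timestamp": str(int(end.timestamp())),
        "stream_id": str(stream_id),
    }


now = datetime(2024, 1, 1, 20, 0, tzinfo=timezone.utc)
entry = epg_listing(1, 0, "News", "Evening news", now, now.replace(hour=21), 5, 42)
assert entry["title"] == base64.b64encode(b"News").decode()
```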
|
@ -2250,34 +2538,45 @@ def xc_get_series_info(request, user, series_id):
|
|||
except Exception as e:
|
||||
logger.error(f"Error refreshing series data for relation {series_relation.id}: {str(e)}")
|
||||
|
||||
# Get episodes for this series from the same M3U account
|
||||
episode_relations = M3UEpisodeRelation.objects.filter(
|
||||
episode__series=series,
|
||||
m3u_account=series_relation.m3u_account
|
||||
).select_related('episode').order_by('episode__season_number', 'episode__episode_number')
|
||||
# Get unique episodes for this series that have relations from any active M3U account
|
||||
# We query episodes directly to avoid duplicates when multiple relations exist
|
||||
# (e.g., same episode in different languages/qualities)
|
||||
from apps.vod.models import Episode
|
||||
episodes = Episode.objects.filter(
|
||||
series=series,
|
||||
m3u_relations__m3u_account__is_active=True
|
||||
).distinct().order_by('season_number', 'episode_number')
|
||||
|
||||
# Group episodes by season
|
||||
seasons = {}
|
||||
for relation in episode_relations:
|
||||
episode = relation.episode
|
||||
for episode in episodes:
|
||||
season_num = episode.season_number or 1
|
||||
if season_num not in seasons:
|
||||
seasons[season_num] = []
|
||||
|
||||
# Try to get the highest priority related M3UEpisodeRelation for this episode (for video/audio/bitrate)
|
||||
# Get the highest priority relation for this episode (for container_extension, video/audio/bitrate)
|
||||
from apps.vod.models import M3UEpisodeRelation
|
||||
first_relation = M3UEpisodeRelation.objects.filter(
|
||||
episode=episode
|
||||
best_relation = M3UEpisodeRelation.objects.filter(
|
||||
episode=episode,
|
||||
m3u_account__is_active=True
|
||||
).select_related('m3u_account').order_by('-m3u_account__priority', 'id').first()
|
||||
|
||||
video = audio = bitrate = None
|
||||
if first_relation and first_relation.custom_properties:
|
||||
info = first_relation.custom_properties.get('info')
|
||||
if info and isinstance(info, dict):
|
||||
info_info = info.get('info')
|
||||
if info_info and isinstance(info_info, dict):
|
||||
video = info_info.get('video', {})
|
||||
audio = info_info.get('audio', {})
|
||||
bitrate = info_info.get('bitrate', 0)
|
||||
container_extension = "mp4"
|
||||
added_timestamp = str(int(episode.created_at.timestamp()))
|
||||
|
||||
if best_relation:
|
||||
container_extension = best_relation.container_extension or "mp4"
|
||||
added_timestamp = str(int(best_relation.created_at.timestamp()))
|
||||
if best_relation.custom_properties:
|
||||
info = best_relation.custom_properties.get('info')
|
||||
if info and isinstance(info, dict):
|
||||
info_info = info.get('info')
|
||||
if info_info and isinstance(info_info, dict):
|
||||
video = info_info.get('video', {})
|
||||
audio = info_info.get('audio', {})
|
||||
bitrate = info_info.get('bitrate', 0)
|
||||
|
||||
if video is None:
|
||||
video = episode.custom_properties.get('video', {}) if episode.custom_properties else {}
|
||||
if audio is None:
|
||||
|
|
@ -2290,8 +2589,8 @@ def xc_get_series_info(request, user, series_id):
|
|||
"season": season_num,
|
||||
"episode_num": episode.episode_number or 0,
|
||||
"title": episode.name,
|
||||
"container_extension": relation.container_extension or "mp4",
|
||||
"added": str(int(relation.created_at.timestamp())),
|
||||
"container_extension": container_extension,
|
||||
"added": added_timestamp,
|
||||
"custom_sid": None,
|
||||
"direct_source": "",
|
||||
"info": {
|
||||
|
|
@ -2607,7 +2906,7 @@ def xc_series_stream(request, username, password, stream_id, extension):
|
|||
filters = {"episode_id": stream_id, "m3u_account__is_active": True}
|
||||
|
||||
try:
|
||||
episode_relation = M3UEpisodeRelation.objects.select_related('episode').get(**filters)
|
||||
episode_relation = M3UEpisodeRelation.objects.select_related('episode').filter(**filters).order_by('-m3u_account__priority', 'id').first()
|
||||
except M3UEpisodeRelation.DoesNotExist:
|
||||
return JsonResponse({"error": "Episode not found"}, status=404)
|
||||
|
||||
|
|
@@ -2640,19 +2939,16 @@ def get_host_and_port(request):
    if xfh:
        if ":" in xfh:
            host, port = xfh.split(":", 1)
            # Omit standard ports from URLs, or omit if port doesn't match standard for scheme
            # (e.g., HTTPS but port is 9191 = behind external reverse proxy)
            # Omit standard ports from URLs
            if port == standard_port:
                return host, None
            # If port doesn't match standard and X-Forwarded-Proto is set, likely behind external RP
            if request.META.get("HTTP_X_FORWARDED_PROTO"):
                host = xfh.split(":")[0] # Strip port, will check for proper port below
            else:
                return host, port
            # Non-standard port in X-Forwarded-Host - return it
            # This handles reverse proxies on non-standard ports (e.g., https://example.com:8443)
            return host, port
        else:
            host = xfh

    # Check for X-Forwarded-Port header (if we didn't already find a valid port)
    # Check for X-Forwarded-Port header (if we didn't find a port in X-Forwarded-Host)
    port = request.META.get("HTTP_X_FORWARDED_PORT")
    if port:
        # Omit standard ports from URLs
@@ -2670,22 +2966,28 @@ def get_host_and_port(request):
    else:
        host = raw_host

    # 3. Check if we're behind a reverse proxy (X-Forwarded-Proto or X-Forwarded-For present)
    # 3. Check for X-Forwarded-Port (when Host header has no port but we're behind a reverse proxy)
    port = request.META.get("HTTP_X_FORWARDED_PORT")
    if port:
        # Omit standard ports from URLs
        return host, None if port == standard_port else port

    # 4. Check if we're behind a reverse proxy (X-Forwarded-Proto or X-Forwarded-For present)
    # If so, assume standard port for the scheme (don't trust SERVER_PORT in this case)
    if request.META.get("HTTP_X_FORWARDED_PROTO") or request.META.get("HTTP_X_FORWARDED_FOR"):
        return host, None

    # 4. Try SERVER_PORT from META (only if NOT behind reverse proxy)
    # 5. Try SERVER_PORT from META (only if NOT behind reverse proxy)
    port = request.META.get("SERVER_PORT")
    if port:
        # Omit standard ports from URLs
        return host, None if port == standard_port else port

    # 5. Dev fallback: guess port 5656
    # 6. Dev fallback: guess port 5656
    if os.environ.get("DISPATCHARR_ENV") == "dev" or host in ("localhost", "127.0.0.1"):
        return host, "5656"

    # 6. Final fallback: assume standard port for scheme (omit from URL)
    # 7. Final fallback: assume standard port for scheme (omit from URL)
    return host, None

def build_absolute_uri_with_port(request, path):
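The body of `build_absolute_uri_with_port` is not shown in this hunk, but given how `generate_m3u` calls it, it presumably combines the request scheme with the host and port resolved by `get_host_and_port`. A minimal sketch of what such a helper could look like (an assumption, not the actual implementation):

```python
def build_absolute_uri_with_port(request, path):
    """Build an absolute URL for `path`, keeping only non-standard ports.

    Sketch only: assumes get_host_and_port(request) returns (host, port_or_None)
    exactly as the function above does.
    """
    scheme = "https" if request.is_secure() else "http"
    host, port = get_host_and_port(request)
    netloc = f"{host}:{port}" if port else host
    return f"{scheme}://{netloc}{path}"
```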
|
|
|||
|
|
@@ -34,6 +34,10 @@ class ClientManager:
        self.heartbeat_interval = ConfigHelper.get('CLIENT_HEARTBEAT_INTERVAL', 10)
        self.last_heartbeat_time = {}

        # Get ProxyServer instance for ownership checks
        from .server import ProxyServer
        self.proxy_server = ProxyServer.get_instance()

        # Start heartbeat thread for local clients
        self._start_heartbeat_thread()
        self._registered_clients = set()  # Track already registered client IDs
@@ -44,9 +48,11 @@ class ClientManager:
            # Import here to avoid potential import issues
            from apps.proxy.ts_proxy.channel_status import ChannelStatus
            import redis
            from django.conf import settings

            # Get all channels from Redis
            redis_client = redis.Redis.from_url('redis://localhost:6379', decode_responses=True)
            # Get all channels from Redis using settings
            redis_url = getattr(settings, 'REDIS_URL', 'redis://localhost:6379/0')
            redis_client = redis.Redis.from_url(redis_url, decode_responses=True)
            all_channels = []
            cursor = 0
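The snippet is cut off right after `cursor = 0`, but that cursor is the standard redis-py SCAN idiom for walking keys without blocking the server the way KEYS would. A hedged sketch of what such a loop typically looks like (the `ts_proxy:channel:*` match pattern is an assumption, not taken from the diff):

```python
import redis

redis_client = redis.Redis.from_url("redis://localhost:6379/0", decode_responses=True)

all_channels = []
cursor = 0
while True:
    # SCAN returns (next_cursor, batch_of_keys); a cursor of 0 means the scan is complete.
    cursor, keys = redis_client.scan(cursor, match="ts_proxy:channel:*", count=100)
    all_channels.extend(keys)
    if cursor == 0:
        break
```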
|
@@ -337,16 +343,30 @@ class ClientManager:

            self._notify_owner_of_activity()

            # Publish client disconnected event
            event_data = json.dumps({
                "event": EventType.CLIENT_DISCONNECTED, # Use constant instead of string
                "channel_id": self.channel_id,
                "client_id": client_id,
                "worker_id": self.worker_id or "unknown",
                "timestamp": time.time(),
                "remaining_clients": remaining
            })
            self.redis_client.publish(RedisKeys.events_channel(self.channel_id), event_data)
            # Check if we're the owner - if so, handle locally; if not, publish event
            am_i_owner = self.proxy_server and self.proxy_server.am_i_owner(self.channel_id)

            if am_i_owner:
                # We're the owner - handle the disconnect directly
                logger.debug(f"Owner handling CLIENT_DISCONNECTED for client {client_id} locally (not publishing)")
                if remaining == 0:
                    # Trigger shutdown check directly via ProxyServer method
                    logger.debug(f"No clients left - triggering immediate shutdown check")
                    # Spawn greenlet to avoid blocking
                    import gevent
                    gevent.spawn(self.proxy_server.handle_client_disconnect, self.channel_id)
            else:
                # We're not the owner - publish event so owner can handle it
                logger.debug(f"Non-owner publishing CLIENT_DISCONNECTED event for client {client_id} on channel {self.channel_id} from worker {self.worker_id}")
                event_data = json.dumps({
                    "event": EventType.CLIENT_DISCONNECTED,
                    "channel_id": self.channel_id,
                    "client_id": client_id,
                    "worker_id": self.worker_id or "unknown",
                    "timestamp": time.time(),
                    "remaining_clients": remaining
                })
                self.redis_client.publish(RedisKeys.events_channel(self.channel_id), event_data)

            # Trigger channel stats update via WebSocket
            self._trigger_stats_update()
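The owner/non-owner split above hinges on `ProxyServer.am_i_owner`, which is not shown in this diff. In multi-worker setups this kind of channel ownership is commonly modeled as a Redis key claimed with SET NX; a hedged sketch of that pattern (an assumption about the mechanism, not the actual Dispatcharr implementation — the key layout is hypothetical):

```python
import redis

r = redis.Redis.from_url("redis://localhost:6379/0", decode_responses=True)
WORKER_ID = "worker-1"


def try_claim_ownership(channel_id, ttl=30):
    """Claim channel ownership if nobody holds it; True means this worker is now the owner."""
    key = f"ts_proxy:channel:{channel_id}:owner"   # hypothetical key layout
    return bool(r.set(key, WORKER_ID, nx=True, ex=ttl))


def am_i_owner(channel_id):
    return r.get(f"ts_proxy:channel:{channel_id}:owner") == WORKER_ID
```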
|
|
|||
|
|
@ -19,7 +19,7 @@ import gevent # Add gevent import
|
|||
from typing import Dict, Optional, Set
|
||||
from apps.proxy.config import TSConfig as Config
|
||||
from apps.channels.models import Channel, Stream
|
||||
from core.utils import RedisClient
|
||||
from core.utils import RedisClient, log_system_event
|
||||
from redis.exceptions import ConnectionError, TimeoutError
|
||||
from .stream_manager import StreamManager
|
||||
from .stream_buffer import StreamBuffer
|
||||
|
|
@ -194,35 +194,11 @@ class ProxyServer:
|
|||
self.redis_client.delete(disconnect_key)
|
||||
|
||||
elif event_type == EventType.CLIENT_DISCONNECTED:
|
||||
logger.debug(f"Owner received {EventType.CLIENT_DISCONNECTED} event for channel {channel_id}")
|
||||
# Check if any clients remain
|
||||
if channel_id in self.client_managers:
|
||||
# VERIFY REDIS CLIENT COUNT DIRECTLY
|
||||
client_set_key = RedisKeys.clients(channel_id)
|
||||
total = self.redis_client.scard(client_set_key) or 0
|
||||
|
||||
if total == 0:
|
||||
logger.debug(f"No clients left after disconnect event - stopping channel {channel_id}")
|
||||
# Set the disconnect timer for other workers to see
|
||||
disconnect_key = RedisKeys.last_client_disconnect(channel_id)
|
||||
self.redis_client.setex(disconnect_key, 60, str(time.time()))
|
||||
|
||||
# Get configured shutdown delay or default
|
||||
shutdown_delay = ConfigHelper.channel_shutdown_delay()
|
||||
|
||||
if shutdown_delay > 0:
|
||||
logger.info(f"Waiting {shutdown_delay}s before stopping channel...")
|
||||
gevent.sleep(shutdown_delay) # REPLACE: time.sleep(shutdown_delay)
|
||||
|
||||
# Re-check client count before stopping
|
||||
total = self.redis_client.scard(client_set_key) or 0
|
||||
if total > 0:
|
||||
logger.info(f"New clients connected during shutdown delay - aborting shutdown")
|
||||
self.redis_client.delete(disconnect_key)
|
||||
return
|
||||
|
||||
# Stop the channel directly
|
||||
self.stop_channel(channel_id)
|
||||
client_id = data.get("client_id")
|
||||
worker_id = data.get("worker_id")
|
||||
logger.debug(f"Owner received {EventType.CLIENT_DISCONNECTED} event for channel {channel_id}, client {client_id} from worker {worker_id}")
|
||||
# Delegate to dedicated method
|
||||
self.handle_client_disconnect(channel_id)
|
||||
|
||||
|
||||
elif event_type == EventType.STREAM_SWITCH:
|
||||
|
|
@ -646,6 +622,29 @@ class ProxyServer:
|
|||
logger.info(f"Created StreamManager for channel {channel_id} with stream ID {channel_stream_id}")
|
||||
self.stream_managers[channel_id] = stream_manager
|
||||
|
||||
# Log channel start event
|
||||
try:
|
||||
channel_obj = Channel.objects.get(uuid=channel_id)
|
||||
|
||||
# Get stream name if stream_id is available
|
||||
stream_name = None
|
||||
if channel_stream_id:
|
||||
try:
|
||||
stream_obj = Stream.objects.get(id=channel_stream_id)
|
||||
stream_name = stream_obj.name
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
log_system_event(
|
||||
'channel_start',
|
||||
channel_id=channel_id,
|
||||
channel_name=channel_obj.name,
|
||||
stream_name=stream_name,
|
||||
stream_id=channel_stream_id
|
||||
)
|
||||
except Exception as e:
|
||||
logger.error(f"Could not log channel start event: {e}")
|
||||
|
||||
# Create client manager with channel_id, redis_client AND worker_id (only if not already exists)
|
||||
if channel_id not in self.client_managers:
|
||||
client_manager = ClientManager(
|
||||
|
|
@@ -800,6 +799,44 @@ class ProxyServer:
            logger.error(f"Error cleaning zombie channel {channel_id}: {e}", exc_info=True)
            return False

    def handle_client_disconnect(self, channel_id):
        """
        Handle client disconnect event - check if channel should shut down.
        Can be called directly by owner or via PubSub from non-owner workers.
        """
        if channel_id not in self.client_managers:
            return

        try:
            # VERIFY REDIS CLIENT COUNT DIRECTLY
            client_set_key = RedisKeys.clients(channel_id)
            total = self.redis_client.scard(client_set_key) or 0

            if total == 0:
                logger.debug(f"No clients left after disconnect event - stopping channel {channel_id}")
                # Set the disconnect timer for other workers to see
                disconnect_key = RedisKeys.last_client_disconnect(channel_id)
                self.redis_client.setex(disconnect_key, 60, str(time.time()))

                # Get configured shutdown delay or default
                shutdown_delay = ConfigHelper.channel_shutdown_delay()

                if shutdown_delay > 0:
                    logger.info(f"Waiting {shutdown_delay}s before stopping channel...")
                    gevent.sleep(shutdown_delay)

                # Re-check client count before stopping
                total = self.redis_client.scard(client_set_key) or 0
                if total > 0:
                    logger.info(f"New clients connected during shutdown delay - aborting shutdown")
                    self.redis_client.delete(disconnect_key)
                    return

                # Stop the channel directly
                self.stop_channel(channel_id)
        except Exception as e:
            logger.error(f"Error handling client disconnect for channel {channel_id}: {e}")

    def stop_channel(self, channel_id):
        """Stop a channel with proper ownership handling"""
        try:
@ -847,6 +884,41 @@ class ProxyServer:
|
|||
self.release_ownership(channel_id)
|
||||
logger.info(f"Released ownership of channel {channel_id}")
|
||||
|
||||
# Log channel stop event (after cleanup, before releasing ownership section ends)
|
||||
try:
|
||||
channel_obj = Channel.objects.get(uuid=channel_id)
|
||||
|
||||
# Calculate runtime and get total bytes from metadata
|
||||
runtime = None
|
||||
total_bytes = None
|
||||
if self.redis_client:
|
||||
metadata_key = RedisKeys.channel_metadata(channel_id)
|
||||
metadata = self.redis_client.hgetall(metadata_key)
|
||||
if metadata:
|
||||
# Calculate runtime from init_time
|
||||
if b'init_time' in metadata:
|
||||
try:
|
||||
init_time = float(metadata[b'init_time'].decode('utf-8'))
|
||||
runtime = round(time.time() - init_time, 2)
|
||||
except Exception:
|
||||
pass
|
||||
# Get total bytes transferred
|
||||
if b'total_bytes' in metadata:
|
||||
try:
|
||||
total_bytes = int(metadata[b'total_bytes'].decode('utf-8'))
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
log_system_event(
|
||||
'channel_stop',
|
||||
channel_id=channel_id,
|
||||
channel_name=channel_obj.name,
|
||||
runtime=runtime,
|
||||
total_bytes=total_bytes
|
||||
)
|
||||
except Exception as e:
|
||||
logger.error(f"Could not log channel stop event: {e}")
|
||||
|
||||
# Always clean up local resources - WITH SAFE CHECKS
|
||||
if channel_id in self.stream_managers:
|
||||
del self.stream_managers[channel_id]
|
||||
|
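The channel_stop logging above reads `init_time` and `total_bytes` back out of the channel's Redis metadata hash. The write side is not shown in this hunk; a hedged sketch of how such fields are typically maintained with a Redis hash (the key name and field names are assumed from the read side, not confirmed by the diff):

```python
import time
import redis

r = redis.Redis.from_url("redis://localhost:6379/0")


def record_channel_start(channel_id):
    # Mirror of the fields read in stop_channel: init_time as epoch seconds, bytes counter at zero.
    r.hset(f"ts_proxy:channel:{channel_id}:metadata",
           mapping={"init_time": time.time(), "total_bytes": 0})


def record_bytes_sent(channel_id, n):
    # HINCRBY keeps total_bytes consistent even with several workers writing concurrently.
    r.hincrby(f"ts_proxy:channel:{channel_id}:metadata", "total_bytes", n)
```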
|
@ -968,6 +1040,13 @@ class ProxyServer:
|
|||
|
||||
# If in connecting or waiting_for_clients state, check grace period
|
||||
if channel_state in [ChannelState.CONNECTING, ChannelState.WAITING_FOR_CLIENTS]:
|
||||
# Check if channel is already stopping
|
||||
if self.redis_client:
|
||||
stop_key = RedisKeys.channel_stopping(channel_id)
|
||||
if self.redis_client.exists(stop_key):
|
||||
logger.debug(f"Channel {channel_id} is already stopping - skipping monitor shutdown")
|
||||
continue
|
||||
|
||||
# Get connection_ready_time from metadata (indicates if channel reached ready state)
|
||||
connection_ready_time = None
|
||||
if metadata and b'connection_ready_time' in metadata:
|
||||
|
|
@ -1048,6 +1127,13 @@ class ProxyServer:
|
|||
logger.info(f"Channel {channel_id} activated with {total_clients} clients after grace period")
|
||||
# If active and no clients, start normal shutdown procedure
|
||||
elif channel_state not in [ChannelState.CONNECTING, ChannelState.WAITING_FOR_CLIENTS] and total_clients == 0:
|
||||
# Check if channel is already stopping
|
||||
if self.redis_client:
|
||||
stop_key = RedisKeys.channel_stopping(channel_id)
|
||||
if self.redis_client.exists(stop_key):
|
||||
logger.debug(f"Channel {channel_id} is already stopping - skipping monitor shutdown")
|
||||
continue
|
||||
|
||||
# Check if there's a pending no-clients timeout
|
||||
disconnect_key = RedisKeys.last_client_disconnect(channel_id)
|
||||
disconnect_time = None
|
||||
|
|
|
|||
|
|
@ -14,6 +14,8 @@ from ..server import ProxyServer
|
|||
from ..redis_keys import RedisKeys
|
||||
from ..constants import EventType, ChannelState, ChannelMetadataField
|
||||
from ..url_utils import get_stream_info_for_switch
|
||||
from core.utils import log_system_event
|
||||
from .log_parsers import LogParserFactory
|
||||
|
||||
logger = logging.getLogger("ts_proxy")
|
||||
|
||||
|
|
@ -418,124 +420,51 @@ class ChannelService:
|
|||
|
||||
@staticmethod
|
||||
def parse_and_store_stream_info(channel_id, stream_info_line, stream_type="video", stream_id=None):
|
||||
"""Parse FFmpeg stream info line and store in Redis metadata and database"""
|
||||
"""
|
||||
Parse stream info from FFmpeg/VLC/Streamlink logs and store in Redis/DB.
|
||||
Uses specialized parsers for each streaming tool.
|
||||
"""
|
||||
try:
|
||||
if stream_type == "input":
|
||||
# Example lines:
|
||||
# Input #0, mpegts, from 'http://example.com/stream.ts':
|
||||
# Input #0, hls, from 'http://example.com/stream.m3u8':
|
||||
# Use factory to parse the line based on stream type
|
||||
parsed_data = LogParserFactory.parse(stream_type, stream_info_line)
|
||||
|
||||
if not parsed_data:
|
||||
return
|
||||
|
||||
# Extract input format (e.g., "mpegts", "hls", "flv", etc.)
|
||||
input_match = re.search(r'Input #\d+,\s*([^,]+)', stream_info_line)
|
||||
input_format = input_match.group(1).strip() if input_match else None
|
||||
# Update Redis and database with parsed data
|
||||
ChannelService._update_stream_info_in_redis(
|
||||
channel_id,
|
||||
parsed_data.get('video_codec'),
|
||||
parsed_data.get('resolution'),
|
||||
parsed_data.get('width'),
|
||||
parsed_data.get('height'),
|
||||
parsed_data.get('source_fps'),
|
||||
parsed_data.get('pixel_format'),
|
||||
parsed_data.get('video_bitrate'),
|
||||
parsed_data.get('audio_codec'),
|
||||
parsed_data.get('sample_rate'),
|
||||
parsed_data.get('audio_channels'),
|
||||
parsed_data.get('audio_bitrate'),
|
||||
parsed_data.get('stream_type')
|
||||
)
|
||||
|
||||
# Store in Redis if we have valid data
|
||||
if input_format:
|
||||
ChannelService._update_stream_info_in_redis(channel_id, None, None, None, None, None, None, None, None, None, None, None, input_format)
|
||||
# Save to database if stream_id is provided
|
||||
if stream_id:
|
||||
ChannelService._update_stream_stats_in_db(stream_id, stream_type=input_format)
|
||||
|
||||
logger.debug(f"Input format info - Format: {input_format} for channel {channel_id}")
|
||||
|
||||
elif stream_type == "video":
|
||||
# Example line:
|
||||
# Stream #0:0: Video: h264 (Main), yuv420p(tv, progressive), 1280x720 [SAR 1:1 DAR 16:9], q=2-31, 2000 kb/s, 29.97 fps, 90k tbn
|
||||
|
||||
# Extract video codec (e.g., "h264", "mpeg2video", etc.)
|
||||
codec_match = re.search(r'Video:\s*([a-zA-Z0-9_]+)', stream_info_line)
|
||||
video_codec = codec_match.group(1) if codec_match else None
|
||||
|
||||
# Extract resolution (e.g., "1280x720") - be more specific to avoid hex values
|
||||
# Look for resolution patterns that are realistic video dimensions
|
||||
resolution_match = re.search(r'\b(\d{3,5})x(\d{3,5})\b', stream_info_line)
|
||||
if resolution_match:
|
||||
width = int(resolution_match.group(1))
|
||||
height = int(resolution_match.group(2))
|
||||
# Validate that these look like reasonable video dimensions
|
||||
if 100 <= width <= 10000 and 100 <= height <= 10000:
|
||||
resolution = f"{width}x{height}"
|
||||
else:
|
||||
width = height = resolution = None
|
||||
else:
|
||||
width = height = resolution = None
|
||||
|
||||
# Extract source FPS (e.g., "29.97 fps")
|
||||
fps_match = re.search(r'(\d+(?:\.\d+)?)\s*fps', stream_info_line)
|
||||
source_fps = float(fps_match.group(1)) if fps_match else None
|
||||
|
||||
# Extract pixel format (e.g., "yuv420p")
|
||||
pixel_format_match = re.search(r'Video:\s*[^,]+,\s*([^,(]+)', stream_info_line)
|
||||
pixel_format = None
|
||||
if pixel_format_match:
|
||||
pf = pixel_format_match.group(1).strip()
|
||||
# Clean up pixel format (remove extra info in parentheses)
|
||||
if '(' in pf:
|
||||
pf = pf.split('(')[0].strip()
|
||||
pixel_format = pf
|
||||
|
||||
# Extract bitrate if present (e.g., "2000 kb/s")
|
||||
video_bitrate = None
|
||||
bitrate_match = re.search(r'(\d+(?:\.\d+)?)\s*kb/s', stream_info_line)
|
||||
if bitrate_match:
|
||||
video_bitrate = float(bitrate_match.group(1))
|
||||
|
||||
# Store in Redis if we have valid data
|
||||
if any(x is not None for x in [video_codec, resolution, source_fps, pixel_format, video_bitrate]):
|
||||
ChannelService._update_stream_info_in_redis(channel_id, video_codec, resolution, width, height, source_fps, pixel_format, video_bitrate, None, None, None, None, None)
|
||||
# Save to database if stream_id is provided
|
||||
if stream_id:
|
||||
ChannelService._update_stream_stats_in_db(
|
||||
stream_id,
|
||||
video_codec=video_codec,
|
||||
resolution=resolution,
|
||||
source_fps=source_fps,
|
||||
pixel_format=pixel_format,
|
||||
video_bitrate=video_bitrate
|
||||
)
|
||||
|
||||
logger.info(f"Video stream info - Codec: {video_codec}, Resolution: {resolution}, "
|
||||
f"Source FPS: {source_fps}, Pixel Format: {pixel_format}, "
|
||||
f"Video Bitrate: {video_bitrate} kb/s")
|
||||
|
||||
elif stream_type == "audio":
|
||||
# Example line:
|
||||
# Stream #0:1[0x101]: Audio: aac (LC) ([15][0][0][0] / 0x000F), 48000 Hz, stereo, fltp, 64 kb/s
|
||||
|
||||
# Extract audio codec (e.g., "aac", "mp3", etc.)
|
||||
codec_match = re.search(r'Audio:\s*([a-zA-Z0-9_]+)', stream_info_line)
|
||||
audio_codec = codec_match.group(1) if codec_match else None
|
||||
|
||||
# Extract sample rate (e.g., "48000 Hz")
|
||||
sample_rate_match = re.search(r'(\d+)\s*Hz', stream_info_line)
|
||||
sample_rate = int(sample_rate_match.group(1)) if sample_rate_match else None
|
||||
|
||||
# Extract channel layout (e.g., "stereo", "5.1", "mono")
|
||||
# Look for common channel layouts
|
||||
channel_match = re.search(r'\b(mono|stereo|5\.1|7\.1|quad|2\.1)\b', stream_info_line, re.IGNORECASE)
|
||||
channels = channel_match.group(1) if channel_match else None
|
||||
|
||||
# Extract audio bitrate if present (e.g., "64 kb/s")
|
||||
audio_bitrate = None
|
||||
bitrate_match = re.search(r'(\d+(?:\.\d+)?)\s*kb/s', stream_info_line)
|
||||
if bitrate_match:
|
||||
audio_bitrate = float(bitrate_match.group(1))
|
||||
|
||||
# Store in Redis if we have valid data
|
||||
if any(x is not None for x in [audio_codec, sample_rate, channels, audio_bitrate]):
|
||||
ChannelService._update_stream_info_in_redis(channel_id, None, None, None, None, None, None, None, audio_codec, sample_rate, channels, audio_bitrate, None)
|
||||
# Save to database if stream_id is provided
|
||||
if stream_id:
|
||||
ChannelService._update_stream_stats_in_db(
|
||||
stream_id,
|
||||
audio_codec=audio_codec,
|
||||
sample_rate=sample_rate,
|
||||
audio_channels=channels,
|
||||
audio_bitrate=audio_bitrate
|
||||
)
|
||||
if stream_id:
|
||||
ChannelService._update_stream_stats_in_db(
|
||||
stream_id,
|
||||
video_codec=parsed_data.get('video_codec'),
|
||||
resolution=parsed_data.get('resolution'),
|
||||
source_fps=parsed_data.get('source_fps'),
|
||||
pixel_format=parsed_data.get('pixel_format'),
|
||||
video_bitrate=parsed_data.get('video_bitrate'),
|
||||
audio_codec=parsed_data.get('audio_codec'),
|
||||
sample_rate=parsed_data.get('sample_rate'),
|
||||
audio_channels=parsed_data.get('audio_channels'),
|
||||
audio_bitrate=parsed_data.get('audio_bitrate'),
|
||||
stream_type=parsed_data.get('stream_type')
|
||||
)
|
||||
|
||||
except Exception as e:
|
||||
logger.debug(f"Error parsing FFmpeg {stream_type} stream info: {e}")
|
||||
logger.debug(f"Error parsing {stream_type} stream info: {e}")
|
||||
|
||||
@staticmethod
|
||||
def _update_stream_info_in_redis(channel_id, codec, resolution, width, height, fps, pixel_format, video_bitrate, audio_codec=None, sample_rate=None, channels=None, audio_bitrate=None, input_format=None):
|
||||
|
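`parse_and_store_stream_info` now delegates line parsing to `LogParserFactory.parse`, whose implementation is not part of this hunk. Given the `BaseLogParser` interface defined in the new `log_parsers.py` below (`can_parse` plus `STREAM_TYPE_METHODS`), a plausible factory would try each parser in turn and call the method registered for the requested stream type. A hedged sketch of that idea, not the actual factory:

```python
from typing import Any, Dict, Optional

# Assumes FFmpegLogParser, VLCLogParser, StreamlinkLogParser from log_parsers.py (shown below).


class LogParserFactory:
    _parsers = None  # instantiated lazily on first use

    @classmethod
    def parse(cls, stream_type: str, line: str) -> Optional[Dict[str, Any]]:
        """Dispatch `line` to the first parser that registered a handler for `stream_type`."""
        if cls._parsers is None:
            cls._parsers = [FFmpegLogParser(), VLCLogParser(), StreamlinkLogParser()]
        for parser in cls._parsers:
            method_name = parser.STREAM_TYPE_METHODS.get(stream_type)
            if method_name:
                return getattr(parser, method_name)(line)
        return None
```

This matches the call site above, `LogParserFactory.parse(stream_type, stream_info_line)`, which expects either a dict of parsed fields or None.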
|
@ -598,7 +527,7 @@ class ChannelService:
|
|||
def _update_stream_stats_in_db(stream_id, **stats):
|
||||
"""Update stream stats in database"""
|
||||
from django.db import connection
|
||||
|
||||
|
||||
try:
|
||||
from apps.channels.models import Stream
|
||||
from django.utils import timezone
|
||||
|
|
@ -624,7 +553,7 @@ class ChannelService:
|
|||
except Exception as e:
|
||||
logger.error(f"Error updating stream stats in database for stream {stream_id}: {e}")
|
||||
return False
|
||||
|
||||
|
||||
finally:
|
||||
# Always close database connection after update
|
||||
try:
|
||||
|
|
@ -700,6 +629,7 @@ class ChannelService:
|
|||
RedisKeys.events_channel(channel_id),
|
||||
json.dumps(switch_request)
|
||||
)
|
||||
|
||||
return True
|
||||
|
||||
@staticmethod
|
||||
|
|
|
|||
410
apps/proxy/ts_proxy/services/log_parsers.py
Normal file
|
|
@ -0,0 +1,410 @@
|
|||
"""Log parsers for FFmpeg, Streamlink, and VLC output."""
|
||||
import re
|
||||
import logging
|
||||
from abc import ABC, abstractmethod
|
||||
from typing import Optional, Dict, Any
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class BaseLogParser(ABC):
|
||||
"""Base class for log parsers"""
|
||||
|
||||
# Map of stream_type -> method_name that this parser handles
|
||||
STREAM_TYPE_METHODS: Dict[str, str] = {}
|
||||
|
||||
@abstractmethod
|
||||
def can_parse(self, line: str) -> Optional[str]:
|
||||
"""
|
||||
Check if this parser can handle the line.
|
||||
Returns the stream_type if it can parse, None otherwise.
|
||||
e.g., 'video', 'audio', 'vlc_video', 'vlc_audio', 'streamlink'
|
||||
"""
|
||||
pass
|
||||
|
||||
@abstractmethod
|
||||
def parse_input_format(self, line: str) -> Optional[Dict[str, Any]]:
|
||||
pass
|
||||
|
||||
@abstractmethod
|
||||
def parse_video_stream(self, line: str) -> Optional[Dict[str, Any]]:
|
||||
pass
|
||||
|
||||
@abstractmethod
|
||||
def parse_audio_stream(self, line: str) -> Optional[Dict[str, Any]]:
|
||||
pass
|
||||
|
||||
|
||||
class FFmpegLogParser(BaseLogParser):
|
||||
"""Parser for FFmpeg log output"""
|
||||
|
||||
STREAM_TYPE_METHODS = {
|
||||
'input': 'parse_input_format',
|
||||
'video': 'parse_video_stream',
|
||||
'audio': 'parse_audio_stream'
|
||||
}
|
||||
|
||||
def can_parse(self, line: str) -> Optional[str]:
|
||||
"""Check if this is an FFmpeg line we can parse"""
|
||||
lower = line.lower()
|
||||
|
||||
# Input format detection
|
||||
if lower.startswith('input #'):
|
||||
return 'input'
|
||||
|
||||
# Stream info (only during input phase, but we'll let stream_manager handle phase tracking)
|
||||
if 'stream #' in lower:
|
||||
if 'video:' in lower:
|
||||
return 'video'
|
||||
elif 'audio:' in lower:
|
||||
return 'audio'
|
||||
|
||||
return None
|
||||
|
||||
def parse_input_format(self, line: str) -> Optional[Dict[str, Any]]:
|
||||
"""Parse FFmpeg input format (e.g., mpegts, hls)"""
|
||||
try:
|
||||
input_match = re.search(r'Input #\d+,\s*([^,]+)', line)
|
||||
input_format = input_match.group(1).strip() if input_match else None
|
||||
|
||||
if input_format:
|
||||
logger.debug(f"Input format info - Format: {input_format}")
|
||||
return {'stream_type': input_format}
|
||||
except Exception as e:
|
||||
logger.debug(f"Error parsing FFmpeg input format: {e}")
|
||||
|
||||
return None
|
||||
|
||||
def parse_video_stream(self, line: str) -> Optional[Dict[str, Any]]:
|
||||
"""Parse FFmpeg video stream info"""
|
||||
try:
|
||||
result = {}
|
||||
|
||||
# Extract codec, resolution, fps, pixel format, bitrate
|
||||
codec_match = re.search(r'Video:\s*([a-zA-Z0-9_]+)', line)
|
||||
if codec_match:
|
||||
result['video_codec'] = codec_match.group(1)
|
||||
|
||||
resolution_match = re.search(r'\b(\d{3,5})x(\d{3,5})\b', line)
|
||||
if resolution_match:
|
||||
width = int(resolution_match.group(1))
|
||||
height = int(resolution_match.group(2))
|
||||
if 100 <= width <= 10000 and 100 <= height <= 10000:
|
||||
result['resolution'] = f"{width}x{height}"
|
||||
result['width'] = width
|
||||
result['height'] = height
|
||||
|
||||
fps_match = re.search(r'(\d+(?:\.\d+)?)\s*fps', line)
|
||||
if fps_match:
|
||||
result['source_fps'] = float(fps_match.group(1))
|
||||
|
||||
pixel_format_match = re.search(r'Video:\s*[^,]+,\s*([^,(]+)', line)
|
||||
if pixel_format_match:
|
||||
pf = pixel_format_match.group(1).strip()
|
||||
if '(' in pf:
|
||||
pf = pf.split('(')[0].strip()
|
||||
result['pixel_format'] = pf
|
||||
|
||||
bitrate_match = re.search(r'(\d+(?:\.\d+)?)\s*kb/s', line)
|
||||
if bitrate_match:
|
||||
result['video_bitrate'] = float(bitrate_match.group(1))
|
||||
|
||||
if result:
|
||||
logger.info(f"Video stream info - Codec: {result.get('video_codec')}, "
|
||||
f"Resolution: {result.get('resolution')}, "
|
||||
f"Source FPS: {result.get('source_fps')}, "
|
||||
f"Pixel Format: {result.get('pixel_format')}, "
|
||||
f"Video Bitrate: {result.get('video_bitrate')} kb/s")
|
||||
return result
|
||||
|
||||
except Exception as e:
|
||||
logger.debug(f"Error parsing FFmpeg video stream info: {e}")
|
||||
|
||||
return None
|
||||
|
||||
def parse_audio_stream(self, line: str) -> Optional[Dict[str, Any]]:
|
||||
"""Parse FFmpeg audio stream info"""
|
||||
try:
|
||||
result = {}
|
||||
|
||||
codec_match = re.search(r'Audio:\s*([a-zA-Z0-9_]+)', line)
|
||||
if codec_match:
|
||||
result['audio_codec'] = codec_match.group(1)
|
||||
|
||||
sample_rate_match = re.search(r'(\d+)\s*Hz', line)
|
||||
if sample_rate_match:
|
||||
result['sample_rate'] = int(sample_rate_match.group(1))
|
||||
|
||||
channel_match = re.search(r'\b(mono|stereo|5\.1|7\.1|quad|2\.1)\b', line, re.IGNORECASE)
|
||||
if channel_match:
|
||||
result['audio_channels'] = channel_match.group(1)
|
||||
|
||||
bitrate_match = re.search(r'(\d+(?:\.\d+)?)\s*kb/s', line)
|
||||
if bitrate_match:
|
||||
result['audio_bitrate'] = float(bitrate_match.group(1))
|
||||
|
||||
if result:
|
||||
return result
|
||||
|
||||
except Exception as e:
|
||||
logger.debug(f"Error parsing FFmpeg audio stream info: {e}")
|
||||
|
||||
return None
|
||||
|
||||
|
||||
class VLCLogParser(BaseLogParser):
    """Parser for VLC log output"""

    STREAM_TYPE_METHODS = {
        'vlc_video': 'parse_video_stream',
        'vlc_audio': 'parse_audio_stream'
    }

    def can_parse(self, line: str) -> Optional[str]:
        """Check if this is a VLC line we can parse"""
        lower = line.lower()

        # VLC TS demux codec detection
        if 'ts demux debug' in lower and 'type=' in lower:
            if 'video' in lower:
                return 'vlc_video'
            elif 'audio' in lower:
                return 'vlc_audio'

        # VLC decoder output
        if 'decoder' in lower and ('channels:' in lower or 'samplerate:' in lower or 'x' in line or 'fps' in lower):
            if 'audio' in lower or 'channels:' in lower or 'samplerate:' in lower:
                return 'vlc_audio'
            else:
                return 'vlc_video'

        # VLC transcode output for resolution/FPS
        if 'stream_out_transcode' in lower and ('source fps' in lower or ('source ' in lower and 'x' in line)):
            return 'vlc_video'

        return None

    def parse_input_format(self, line: str) -> Optional[Dict[str, Any]]:
        return None

    def parse_video_stream(self, line: str) -> Optional[Dict[str, Any]]:
        """Parse VLC TS demux output and decoder info for video"""
        try:
            lower = line.lower()
            result = {}

            # Codec detection from TS demux
            video_codec_map = {
                ('avc', 'h.264', 'type=0x1b'): "h264",
                ('hevc', 'h.265', 'type=0x24'): "hevc",
                ('mpeg-2', 'type=0x02'): "mpeg2video",
                ('mpeg-4', 'type=0x10'): "mpeg4"
            }

            for patterns, codec in video_codec_map.items():
                if any(p in lower for p in patterns):
                    result['video_codec'] = codec
                    break

            # Extract FPS from transcode output: "source fps 30/1"
            fps_fraction_match = re.search(r'source fps\s+(\d+)/(\d+)', lower)
            if fps_fraction_match:
                numerator = int(fps_fraction_match.group(1))
                denominator = int(fps_fraction_match.group(2))
                if denominator > 0:
                    result['source_fps'] = numerator / denominator

            # Extract resolution from transcode output: "source 1280x720"
            source_res_match = re.search(r'source\s+(\d{3,4})x(\d{3,4})', lower)
            if source_res_match:
                width = int(source_res_match.group(1))
                height = int(source_res_match.group(2))
                if 100 <= width <= 10000 and 100 <= height <= 10000:
                    result['resolution'] = f"{width}x{height}"
                    result['width'] = width
                    result['height'] = height
            else:
                # Fallback: generic resolution pattern
                resolution_match = re.search(r'(\d{3,4})x(\d{3,4})', line)
                if resolution_match:
                    width = int(resolution_match.group(1))
                    height = int(resolution_match.group(2))
                    if 100 <= width <= 10000 and 100 <= height <= 10000:
                        result['resolution'] = f"{width}x{height}"
                        result['width'] = width
                        result['height'] = height

            # Fallback: try to extract FPS from generic format
            if 'source_fps' not in result:
                fps_match = re.search(r'(\d+\.?\d*)\s*fps', lower)
                if fps_match:
                    result['source_fps'] = float(fps_match.group(1))

            return result if result else None

        except Exception as e:
            logger.debug(f"Error parsing VLC video stream info: {e}")

        return None

    def parse_audio_stream(self, line: str) -> Optional[Dict[str, Any]]:
        """Parse VLC TS demux output and decoder info for audio"""
        try:
            lower = line.lower()
            result = {}

            # Codec detection from TS demux
            audio_codec_map = {
                ('type=0xf', 'adts'): "aac",
                ('type=0x03', 'type=0x04'): "mp3",
                ('type=0x06', 'type=0x81'): "ac3",
                ('type=0x0b', 'lpcm'): "pcm"
            }

            for patterns, codec in audio_codec_map.items():
                if any(p in lower for p in patterns):
                    result['audio_codec'] = codec
                    break

            # VLC decoder format: "AAC channels: 2 samplerate: 48000"
            if 'channels:' in lower:
                channels_match = re.search(r'channels:\s*(\d+)', lower)
                if channels_match:
                    num_channels = int(channels_match.group(1))
                    # Convert number to name
                    channel_names = {1: 'mono', 2: 'stereo', 6: '5.1', 8: '7.1'}
                    result['audio_channels'] = channel_names.get(num_channels, str(num_channels))

            if 'samplerate:' in lower:
                samplerate_match = re.search(r'samplerate:\s*(\d+)', lower)
                if samplerate_match:
                    result['sample_rate'] = int(samplerate_match.group(1))

            # Try to extract sample rate (Hz format)
            sample_rate_match = re.search(r'(\d+)\s*hz', lower)
            if sample_rate_match and 'sample_rate' not in result:
                result['sample_rate'] = int(sample_rate_match.group(1))

            # Try to extract channels (word format)
            if 'audio_channels' not in result:
                channel_match = re.search(r'\b(mono|stereo|5\.1|7\.1|quad|2\.1)\b', lower)
                if channel_match:
                    result['audio_channels'] = channel_match.group(1)

            return result if result else None

        except Exception as e:
            logger.error(f"[VLC AUDIO PARSER] Error parsing VLC audio stream info: {e}")

        return None

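# Illustrative sketch, not part of the diff: feeding hypothetical VLC log lines through
# VLCLogParser as defined above. The sample lines are assumptions modelled on VLC's
# "ts demux debug" and transcode output, not captured logs.
_vlc = VLCLogParser()

_demux_line = "ts demux debug: pid 256 has type=0x1b (video)"
_vlc.can_parse(_demux_line)             # 'vlc_video'
_vlc.parse_video_stream(_demux_line)    # {'video_codec': 'h264'}

_transcode_line = "stream_out_transcode debug: source 1280x720, source fps 30/1"
_vlc.parse_video_stream(_transcode_line)
# {'source_fps': 30.0, 'resolution': '1280x720', 'width': 1280, 'height': 720}
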
class StreamlinkLogParser(BaseLogParser):
    """Parser for Streamlink log output"""

    STREAM_TYPE_METHODS = {
        'streamlink': 'parse_video_stream'
    }

    def can_parse(self, line: str) -> Optional[str]:
        """Check if this is a Streamlink line we can parse"""
        lower = line.lower()

        if 'opening stream:' in lower or 'available streams:' in lower:
            return 'streamlink'

        return None

    def parse_input_format(self, line: str) -> Optional[Dict[str, Any]]:
        return None

    def parse_video_stream(self, line: str) -> Optional[Dict[str, Any]]:
        """Parse Streamlink quality/resolution"""
        try:
            quality_match = re.search(r'(\d+p|\d+x\d+)', line)
            if quality_match:
                quality = quality_match.group(1)

                if 'x' in quality:
                    resolution = quality
                    width, height = map(int, quality.split('x'))
                else:
                    resolutions = {
                        '2160p': ('3840x2160', 3840, 2160),
                        '1080p': ('1920x1080', 1920, 1080),
                        '720p': ('1280x720', 1280, 720),
                        '480p': ('854x480', 854, 480),
                        '360p': ('640x360', 640, 360)
                    }
                    resolution, width, height = resolutions.get(quality, ('1920x1080', 1920, 1080))

                return {
                    'video_codec': 'h264',
                    'resolution': resolution,
                    'width': width,
                    'height': height,
                    'pixel_format': 'yuv420p'
                }

        except Exception as e:
            logger.debug(f"Error parsing Streamlink video info: {e}")

        return None

    def parse_audio_stream(self, line: str) -> Optional[Dict[str, Any]]:
        return None

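# Illustrative sketch, not part of the diff: the quality-name fallback above maps common
# Streamlink labels to assumed resolutions; unlisted labels default to 1080p. The sample
# line is hypothetical.
_sl = StreamlinkLogParser()
_sl.parse_video_stream("[cli][info] Opening stream: 720p (hls)")
# {'video_codec': 'h264', 'resolution': '1280x720', 'width': 1280,
#  'height': 720, 'pixel_format': 'yuv420p'}
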
class LogParserFactory:
    """Factory to get the appropriate log parser"""

    _parsers = {
        'ffmpeg': FFmpegLogParser(),
        'vlc': VLCLogParser(),
        'streamlink': StreamlinkLogParser()
    }

    @classmethod
    def _get_parser_and_method(cls, stream_type: str) -> Optional[tuple[BaseLogParser, str]]:
        """Determine parser and method from stream_type"""
        # Check each parser to see if it handles this stream_type
        for parser in cls._parsers.values():
            method_name = parser.STREAM_TYPE_METHODS.get(stream_type)
            if method_name:
                return (parser, method_name)

        return None

    @classmethod
    def parse(cls, stream_type: str, line: str) -> Optional[Dict[str, Any]]:
        """
        Parse a log line based on stream type.
        Returns parsed data or None if parsing fails.
        """
        result = cls._get_parser_and_method(stream_type)
        if not result:
            return None

        parser, method_name = result
        method = getattr(parser, method_name, None)
        if method:
            return method(line)

        return None

    @classmethod
    def auto_parse(cls, line: str) -> Optional[tuple[str, Dict[str, Any]]]:
        """
        Automatically detect which parser can handle this line and parse it.
        Returns (stream_type, parsed_data) or None if no parser can handle it.
        """
        # Try each parser to see if it can handle this line
        for parser in cls._parsers.values():
            stream_type = parser.can_parse(line)
            if stream_type:
                # Parser can handle this line, now parse it
                parsed_data = cls.parse(stream_type, line)
                if parsed_data:
                    return (stream_type, parsed_data)

        return None

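# Illustrative sketch, not part of the diff: the factory supports two entry points,
# direct routing when the producing command is known, and auto-detection otherwise.
# The sample line is a hypothetical VLC transcode message.
_line = "stream_out_transcode debug: source 1920x1080, source fps 25/1"

# Direct routing: caller already knows which parser applies.
_vlc_parser = LogParserFactory._parsers['vlc']
_stream_type = _vlc_parser.can_parse(_line)                   # 'vlc_video'
_data = LogParserFactory.parse(_stream_type, _line) if _stream_type else None
# roughly {'source_fps': 25.0, 'resolution': '1920x1080', 'width': 1920, 'height': 1080}

# Auto-detection: try every registered parser until one claims the line.
_detected = LogParserFactory.auto_parse(_line)
# e.g. ('vlc_video', {...}) provided the FFmpeg parser (defined earlier) does not claim it first
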
@ -8,6 +8,8 @@ import logging
|
|||
import threading
|
||||
import gevent # Add this import at the top of your file
|
||||
from apps.proxy.config import TSConfig as Config
|
||||
from apps.channels.models import Channel
|
||||
from core.utils import log_system_event
|
||||
from .server import ProxyServer
|
||||
from .utils import create_ts_packet, get_logger
|
||||
from .redis_keys import RedisKeys
|
||||
|
|
@ -88,6 +90,20 @@ class StreamGenerator:
|
|||
if not self._setup_streaming():
|
||||
return
|
||||
|
||||
# Log client connect event
|
||||
try:
|
||||
channel_obj = Channel.objects.get(uuid=self.channel_id)
|
||||
log_system_event(
|
||||
'client_connect',
|
||||
channel_id=self.channel_id,
|
||||
channel_name=channel_obj.name,
|
||||
client_ip=self.client_ip,
|
||||
client_id=self.client_id,
|
||||
user_agent=self.client_user_agent[:100] if self.client_user_agent else None
|
||||
)
|
||||
except Exception as e:
|
||||
logger.error(f"Could not log client connect event: {e}")
|
||||
|
||||
# Main streaming loop
|
||||
for chunk in self._stream_data_generator():
|
||||
yield chunk
|
||||
|
|
@ -439,6 +455,22 @@ class StreamGenerator:
|
|||
total_clients = client_manager.get_total_client_count()
|
||||
logger.info(f"[{self.client_id}] Disconnected after {elapsed:.2f}s (local: {local_clients}, total: {total_clients})")
|
||||
|
||||
# Log client disconnect event
|
||||
try:
|
||||
channel_obj = Channel.objects.get(uuid=self.channel_id)
|
||||
log_system_event(
|
||||
'client_disconnect',
|
||||
channel_id=self.channel_id,
|
||||
channel_name=channel_obj.name,
|
||||
client_ip=self.client_ip,
|
||||
client_id=self.client_id,
|
||||
user_agent=self.client_user_agent[:100] if self.client_user_agent else None,
|
||||
duration=round(elapsed, 2),
|
||||
bytes_sent=self.bytes_sent
|
||||
)
|
||||
except Exception as e:
|
||||
logger.error(f"Could not log client disconnect event: {e}")
|
||||
|
||||
# Schedule channel shutdown if no clients left
|
||||
if not stream_released: # Only if we haven't already released the stream
|
||||
self._schedule_channel_shutdown_if_needed(local_clients)
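
# Illustrative sketch, not part of the diff: the connect/disconnect hunks above repeat the
# same lookup-and-log pattern (Channel and log_system_event come from the imports in this
# diff; the helper name and placement are assumptions). A method like this would keep event
# logging from ever breaking the streaming path:
def _log_channel_event(self, event_type, **extra):
    try:
        channel_obj = Channel.objects.get(uuid=self.channel_id)
        log_system_event(event_type, channel_id=self.channel_id,
                         channel_name=channel_obj.name, **extra)
    except Exception as e:
        logger.error(f"Could not log {event_type} event: {e}")

# Hypothetical usage:
#   self._log_channel_event('client_connect', client_ip=self.client_ip, client_id=self.client_id)
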
|
||||
|
|
|
|||
|
|
@ -16,6 +16,7 @@ from apps.proxy.config import TSConfig as Config
|
|||
from apps.channels.models import Channel, Stream
|
||||
from apps.m3u.models import M3UAccount, M3UAccountProfile
|
||||
from core.models import UserAgent, CoreSettings
|
||||
from core.utils import log_system_event
|
||||
from .stream_buffer import StreamBuffer
|
||||
from .utils import detect_stream_type, get_logger
|
||||
from .redis_keys import RedisKeys
|
||||
|
|
@ -106,6 +107,10 @@ class StreamManager:
|
|||
# Add this flag for tracking transcoding process status
|
||||
self.transcode_process_active = False
|
||||
|
||||
# Track stream command for efficient log parser routing
|
||||
self.stream_command = None
|
||||
self.parser_type = None # Will be set when transcode process starts
|
||||
|
||||
# Add tracking for data throughput
|
||||
self.bytes_processed = 0
|
||||
self.last_bytes_update = time.time()
|
||||
|
|
@ -260,6 +265,20 @@ class StreamManager:
|
|||
# Store connection start time to measure success duration
|
||||
connection_start_time = time.time()
|
||||
|
||||
# Log reconnection event if this is a retry (not first attempt)
|
||||
if self.retry_count > 0:
|
||||
try:
|
||||
channel_obj = Channel.objects.get(uuid=self.channel_id)
|
||||
log_system_event(
|
||||
'channel_reconnect',
|
||||
channel_id=self.channel_id,
|
||||
channel_name=channel_obj.name,
|
||||
attempt=self.retry_count + 1,
|
||||
max_attempts=self.max_retries
|
||||
)
|
||||
except Exception as e:
|
||||
logger.error(f"Could not log reconnection event: {e}")
|
||||
|
||||
# Successfully connected - read stream data until disconnect/error
|
||||
self._process_stream_data()
|
||||
# If we get here, the connection was closed/failed
|
||||
|
|
@ -289,6 +308,20 @@ class StreamManager:
|
|||
if self.retry_count >= self.max_retries:
|
||||
url_failed = True
|
||||
logger.warning(f"Maximum retry attempts ({self.max_retries}) reached for URL: {self.url} for channel: {self.channel_id}")
|
||||
|
||||
# Log connection error event
|
||||
try:
|
||||
channel_obj = Channel.objects.get(uuid=self.channel_id)
|
||||
log_system_event(
|
||||
'channel_error',
|
||||
channel_id=self.channel_id,
|
||||
channel_name=channel_obj.name,
|
||||
error_type='connection_failed',
|
||||
url=self.url[:100] if self.url else None,
|
||||
attempts=self.max_retries
|
||||
)
|
||||
except Exception as e:
|
||||
logger.error(f"Could not log connection error event: {e}")
|
||||
else:
|
||||
# Wait with exponential backoff before retrying
|
||||
timeout = min(.25 * self.retry_count, 3) # Cap at 3 seconds
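
# Illustrative sketch, not part of the diff: the retry delay above grows linearly with the
# retry count and is capped at 3 seconds (despite the "exponential backoff" comment):
for retry_count in (1, 2, 4, 8, 12, 20):
    print(retry_count, min(.25 * retry_count, 3))
# 1 0.25   2 0.5   4 1.0   8 2.0   12 3.0   20 3
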
|
||||
|
|
@ -302,6 +335,21 @@ class StreamManager:
|
|||
|
||||
if self.retry_count >= self.max_retries:
|
||||
url_failed = True
|
||||
|
||||
# Log connection error event with exception details
|
||||
try:
|
||||
channel_obj = Channel.objects.get(uuid=self.channel_id)
|
||||
log_system_event(
|
||||
'channel_error',
|
||||
channel_id=self.channel_id,
|
||||
channel_name=channel_obj.name,
|
||||
error_type='connection_exception',
|
||||
error_message=str(e)[:200],
|
||||
url=self.url[:100] if self.url else None,
|
||||
attempts=self.max_retries
|
||||
)
|
||||
except Exception as log_error:
|
||||
logger.error(f"Could not log connection error event: {log_error}")
|
||||
else:
|
||||
# Wait with exponential backoff before retrying
|
||||
timeout = min(.25 * self.retry_count, 3) # Cap at 3 seconds
|
||||
|
|
@ -432,6 +480,21 @@ class StreamManager:
|
|||
# Build and start transcode command
|
||||
self.transcode_cmd = stream_profile.build_command(self.url, self.user_agent)
|
||||
|
||||
# Store stream command for efficient log parser routing
|
||||
self.stream_command = stream_profile.command
|
||||
# Map actual commands to parser types for direct routing
|
||||
command_to_parser = {
|
||||
'ffmpeg': 'ffmpeg',
|
||||
'cvlc': 'vlc',
|
||||
'vlc': 'vlc',
|
||||
'streamlink': 'streamlink'
|
||||
}
|
||||
self.parser_type = command_to_parser.get(self.stream_command.lower())
|
||||
if self.parser_type:
|
||||
logger.debug(f"Using {self.parser_type} parser for log parsing (command: {self.stream_command})")
|
||||
else:
|
||||
logger.debug(f"Unknown stream command '{self.stream_command}', will use auto-detection for log parsing")
|
||||
|
||||
# For UDP streams, remove any user_agent parameters from the command
|
||||
if hasattr(self, 'stream_type') and self.stream_type == StreamType.UDP:
|
||||
# Filter out any arguments that contain the user_agent value or related headers
|
||||
|
|
@ -601,35 +664,51 @@ class StreamManager:
|
|||
if content_lower.startswith('output #') or 'encoder' in content_lower:
|
||||
self.ffmpeg_input_phase = False
|
||||
|
||||
# Only parse stream info if we're still in the input phase
|
||||
if ("stream #" in content_lower and
|
||||
("video:" in content_lower or "audio:" in content_lower) and
|
||||
self.ffmpeg_input_phase):
|
||||
# Route to appropriate parser based on known command type
|
||||
from .services.log_parsers import LogParserFactory
|
||||
from .services.channel_service import ChannelService
|
||||
|
||||
from .services.channel_service import ChannelService
|
||||
if "video:" in content_lower:
|
||||
ChannelService.parse_and_store_stream_info(self.channel_id, content, "video", self.current_stream_id)
|
||||
elif "audio:" in content_lower:
|
||||
ChannelService.parse_and_store_stream_info(self.channel_id, content, "audio", self.current_stream_id)
|
||||
parse_result = None
|
||||
|
||||
# If we know the parser type, use direct routing for efficiency
|
||||
if self.parser_type:
|
||||
# Get the appropriate parser and check what it can parse
|
||||
parser = LogParserFactory._parsers.get(self.parser_type)
|
||||
if parser:
|
||||
stream_type = parser.can_parse(content)
|
||||
if stream_type:
|
||||
# Parser can handle this line, parse it directly
|
||||
parsed_data = LogParserFactory.parse(stream_type, content)
|
||||
if parsed_data:
|
||||
parse_result = (stream_type, parsed_data)
|
||||
else:
|
||||
# Unknown command type - use auto-detection as fallback
|
||||
parse_result = LogParserFactory.auto_parse(content)
|
||||
|
||||
if parse_result:
|
||||
stream_type, parsed_data = parse_result
|
||||
# For FFmpeg, only parse during input phase
|
||||
if stream_type in ['video', 'audio', 'input']:
|
||||
if self.ffmpeg_input_phase:
|
||||
ChannelService.parse_and_store_stream_info(self.channel_id, content, stream_type, self.current_stream_id)
|
||||
else:
|
||||
# VLC and Streamlink can be parsed anytime
|
||||
ChannelService.parse_and_store_stream_info(self.channel_id, content, stream_type, self.current_stream_id)
|
||||
|
||||
# Determine log level based on content
|
||||
if any(keyword in content_lower for keyword in ['error', 'failed', 'cannot', 'invalid', 'corrupt']):
|
||||
logger.error(f"FFmpeg stderr for channel {self.channel_id}: {content}")
|
||||
logger.error(f"Stream process error for channel {self.channel_id}: {content}")
|
||||
elif any(keyword in content_lower for keyword in ['warning', 'deprecated', 'ignoring']):
|
||||
logger.warning(f"FFmpeg stderr for channel {self.channel_id}: {content}")
|
||||
logger.warning(f"Stream process warning for channel {self.channel_id}: {content}")
|
||||
elif content.startswith('frame=') or 'fps=' in content or 'speed=' in content:
|
||||
# Stats lines - log at trace level to avoid spam
|
||||
logger.trace(f"FFmpeg stats for channel {self.channel_id}: {content}")
|
||||
logger.trace(f"Stream stats for channel {self.channel_id}: {content}")
|
||||
elif any(keyword in content_lower for keyword in ['input', 'output', 'stream', 'video', 'audio']):
|
||||
# Stream info - log at info level
|
||||
logger.info(f"FFmpeg info for channel {self.channel_id}: {content}")
|
||||
if content.startswith('Input #0'):
|
||||
# If it's input 0, parse stream info
|
||||
from .services.channel_service import ChannelService
|
||||
ChannelService.parse_and_store_stream_info(self.channel_id, content, "input", self.current_stream_id)
|
||||
logger.info(f"Stream info for channel {self.channel_id}: {content}")
|
||||
else:
|
||||
# Everything else at debug level
|
||||
logger.debug(f"FFmpeg stderr for channel {self.channel_id}: {content}")
|
||||
logger.debug(f"Stream process output for channel {self.channel_id}: {content}")
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Error logging stderr content for channel {self.channel_id}: {e}")
|
||||
|
|
@ -702,6 +781,19 @@ class StreamManager:
|
|||
# Reset buffering state
|
||||
self.buffering = False
|
||||
self.buffering_start_time = None
|
||||
|
||||
# Log failover event
|
||||
try:
|
||||
channel_obj = Channel.objects.get(uuid=self.channel_id)
|
||||
log_system_event(
|
||||
'channel_failover',
|
||||
channel_id=self.channel_id,
|
||||
channel_name=channel_obj.name,
|
||||
reason='buffering_timeout',
|
||||
duration=buffering_duration
|
||||
)
|
||||
except Exception as e:
|
||||
logger.error(f"Could not log failover event: {e}")
|
||||
else:
|
||||
logger.error(f"Failed to switch to next stream for channel {self.channel_id} after buffering timeout")
|
||||
else:
|
||||
|
|
@ -709,6 +801,19 @@ class StreamManager:
|
|||
self.buffering = True
|
||||
self.buffering_start_time = time.time()
|
||||
logger.warning(f"Buffering started for channel {self.channel_id} - speed: {ffmpeg_speed}x")
|
||||
|
||||
# Log system event for buffering
|
||||
try:
|
||||
channel_obj = Channel.objects.get(uuid=self.channel_id)
|
||||
log_system_event(
|
||||
'channel_buffering',
|
||||
channel_id=self.channel_id,
|
||||
channel_name=channel_obj.name,
|
||||
speed=ffmpeg_speed
|
||||
)
|
||||
except Exception as e:
|
||||
logger.error(f"Could not log buffering event: {e}")
|
||||
|
||||
# Log buffering warning
|
||||
logger.debug(f"FFmpeg speed on channel {self.channel_id} is below {self.buffering_speed} ({ffmpeg_speed}x) - buffering detected")
|
||||
# Set channel state to buffering
|
||||
|
|
@ -1004,6 +1109,19 @@ class StreamManager:
|
|||
except Exception as e:
|
||||
logger.warning(f"Failed to reset buffer position: {e}")
|
||||
|
||||
# Log stream switch event
|
||||
try:
|
||||
channel_obj = Channel.objects.get(uuid=self.channel_id)
|
||||
log_system_event(
|
||||
'stream_switch',
|
||||
channel_id=self.channel_id,
|
||||
channel_name=channel_obj.name,
|
||||
new_url=new_url[:100] if new_url else None,
|
||||
stream_id=stream_id
|
||||
)
|
||||
except Exception as e:
|
||||
logger.error(f"Could not log stream switch event: {e}")
|
||||
|
||||
return True
|
||||
except Exception as e:
|
||||
logger.error(f"Error during URL update for channel {self.channel_id}: {e}", exc_info=True)
|
||||
|
|
@ -1122,6 +1240,19 @@ class StreamManager:
|
|||
if connection_result:
|
||||
self.connection_start_time = time.time()
|
||||
logger.info(f"Reconnect successful for channel {self.channel_id}")
|
||||
|
||||
# Log reconnection event
|
||||
try:
|
||||
channel_obj = Channel.objects.get(uuid=self.channel_id)
|
||||
log_system_event(
|
||||
'channel_reconnect',
|
||||
channel_id=self.channel_id,
|
||||
channel_name=channel_obj.name,
|
||||
reason='health_monitor'
|
||||
)
|
||||
except Exception as e:
|
||||
logger.error(f"Could not log reconnection event: {e}")
|
||||
|
||||
return True
|
||||
else:
|
||||
logger.warning(f"Reconnect failed for channel {self.channel_id}")
|
||||
|
|
@ -1199,25 +1330,17 @@ class StreamManager:
|
|||
logger.debug(f"Error closing socket for channel {self.channel_id}: {e}")
|
||||
pass
|
||||
|
||||
# Enhanced transcode process cleanup with more aggressive termination
|
||||
# Enhanced transcode process cleanup with immediate termination
|
||||
if self.transcode_process:
|
||||
try:
|
||||
# First try polite termination
|
||||
logger.debug(f"Terminating transcode process for channel {self.channel_id}")
|
||||
self.transcode_process.terminate()
|
||||
logger.debug(f"Killing transcode process for channel {self.channel_id}")
|
||||
self.transcode_process.kill()
|
||||
|
||||
# Give it a short time to terminate gracefully
|
||||
# Give it a very short time to die
|
||||
try:
|
||||
self.transcode_process.wait(timeout=1.0)
|
||||
self.transcode_process.wait(timeout=0.5)
|
||||
except subprocess.TimeoutExpired:
|
||||
# If it doesn't terminate quickly, kill it
|
||||
logger.warning(f"Transcode process didn't terminate within timeout, killing forcefully for channel {self.channel_id}")
|
||||
self.transcode_process.kill()
|
||||
|
||||
try:
|
||||
self.transcode_process.wait(timeout=1.0)
|
||||
except subprocess.TimeoutExpired:
|
||||
logger.error(f"Failed to kill transcode process even with force for channel {self.channel_id}")
|
||||
logger.error(f"Failed to kill transcode process even with force for channel {self.channel_id}")
|
||||
except Exception as e:
|
||||
logger.debug(f"Error terminating transcode process for channel {self.channel_id}: {e}")
|
||||
|
||||
|
|
|
|||
|
|
@ -39,6 +39,8 @@ def generate_stream_url(channel_id: str) -> Tuple[str, str, bool, Optional[int]]
|
|||
|
||||
# Handle direct stream preview (custom streams)
|
||||
if isinstance(channel_or_stream, Stream):
|
||||
from core.utils import RedisClient
|
||||
|
||||
stream = channel_or_stream
|
||||
logger.info(f"Previewing stream directly: {stream.id} ({stream.name})")
|
||||
|
||||
|
|
@ -48,12 +50,43 @@ def generate_stream_url(channel_id: str) -> Tuple[str, str, bool, Optional[int]]
|
|||
logger.error(f"Stream {stream.id} has no M3U account")
|
||||
return None, None, False, None
|
||||
|
||||
# Get the default profile for this M3U account (custom streams use default)
|
||||
m3u_profiles = m3u_account.profiles.all()
|
||||
profile = next((obj for obj in m3u_profiles if obj.is_default), None)
|
||||
# Get active profiles for this M3U account
|
||||
m3u_profiles = m3u_account.profiles.filter(is_active=True)
|
||||
default_profile = next((obj for obj in m3u_profiles if obj.is_default), None)
|
||||
|
||||
if not profile:
|
||||
logger.error(f"No default profile found for M3U account {m3u_account.id}")
|
||||
if not default_profile:
|
||||
logger.error(f"No default active profile found for M3U account {m3u_account.id}")
|
||||
return None, None, False, None
|
||||
|
||||
# Check profiles in order: default first, then others
|
||||
profiles = [default_profile] + [obj for obj in m3u_profiles if not obj.is_default]
|
||||
|
||||
# Try to find an available profile with connection capacity
|
||||
redis_client = RedisClient.get_client()
|
||||
selected_profile = None
|
||||
|
||||
for profile in profiles:
|
||||
logger.info(profile)
|
||||
|
||||
# Check connection availability
|
||||
if redis_client:
|
||||
profile_connections_key = f"profile_connections:{profile.id}"
|
||||
current_connections = int(redis_client.get(profile_connections_key) or 0)
|
||||
|
||||
# Check if profile has available slots (or unlimited connections)
|
||||
if profile.max_streams == 0 or current_connections < profile.max_streams:
|
||||
selected_profile = profile
|
||||
logger.debug(f"Selected profile {profile.id} with {current_connections}/{profile.max_streams} connections for stream preview")
|
||||
break
|
||||
else:
|
||||
logger.debug(f"Profile {profile.id} at max connections: {current_connections}/{profile.max_streams}")
|
||||
else:
|
||||
# No Redis available, use first active profile
|
||||
selected_profile = profile
|
||||
break
|
||||
|
||||
if not selected_profile:
|
||||
logger.error(f"No profiles available with connection capacity for M3U account {m3u_account.id}")
|
||||
return None, None, False, None
|
||||
|
||||
# Get the appropriate user agent
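
# Illustrative sketch, not part of the diff: the selection loop above can be read as
# "first profile whose Redis counter is under its max_streams, where 0 means unlimited".
# A condensed version under the same assumptions (profiles are expected default-first):
def pick_profile(profiles, redis_client):
    for profile in profiles:
        if not redis_client:
            return profile  # no Redis available: fall back to the first active profile
        used = int(redis_client.get(f"profile_connections:{profile.id}") or 0)
        if profile.max_streams == 0 or used < profile.max_streams:
            return profile
    return None
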
|
||||
|
|
@ -62,8 +95,8 @@ def generate_stream_url(channel_id: str) -> Tuple[str, str, bool, Optional[int]]
|
|||
stream_user_agent = UserAgent.objects.get(id=CoreSettings.get_default_user_agent_id())
|
||||
logger.debug(f"No user agent found for account, using default: {stream_user_agent}")
|
||||
|
||||
# Get stream URL (no transformation for custom streams)
|
||||
stream_url = stream.url
|
||||
# Get stream URL with the selected profile's URL transformation
|
||||
stream_url = transform_url(stream.url, selected_profile.search_pattern, selected_profile.replace_pattern)
|
||||
|
||||
# Check if the stream has its own stream_profile set, otherwise use default
|
||||
if stream.stream_profile:
|
||||
|
|
@ -429,16 +462,21 @@ def validate_stream_url(url, user_agent=None, timeout=(5, 5)):
|
|||
session.headers.update(headers)
|
||||
|
||||
# Make HEAD request first as it's faster and doesn't download content
|
||||
head_response = session.head(
|
||||
url,
|
||||
timeout=timeout,
|
||||
allow_redirects=True
|
||||
)
|
||||
head_request_success = True
|
||||
try:
|
||||
head_response = session.head(
|
||||
url,
|
||||
timeout=timeout,
|
||||
allow_redirects=True
|
||||
)
|
||||
except requests.exceptions.RequestException as e:
|
||||
head_request_success = False
|
||||
logger.warning(f"Request error (HEAD), assuming HEAD not supported: {str(e)}")
|
||||
|
||||
# If HEAD not supported, server will return 405 or other error
|
||||
if 200 <= head_response.status_code < 300:
|
||||
if head_request_success and (200 <= head_response.status_code < 300):
|
||||
# HEAD request successful
|
||||
return True, head_response.url, head_response.status_code, "Valid (HEAD request)"
|
||||
return True, url, head_response.status_code, "Valid (HEAD request)"
|
||||
|
||||
# Try a GET request with stream=True to avoid downloading all content
|
||||
get_response = session.get(
|
||||
|
|
@ -451,7 +489,7 @@ def validate_stream_url(url, user_agent=None, timeout=(5, 5)):
|
|||
# IMPORTANT: Check status code first before checking content
|
||||
if not (200 <= get_response.status_code < 300):
|
||||
logger.warning(f"Stream validation failed with HTTP status {get_response.status_code}")
|
||||
return False, get_response.url, get_response.status_code, f"Invalid HTTP status: {get_response.status_code}"
|
||||
return False, url, get_response.status_code, f"Invalid HTTP status: {get_response.status_code}"
|
||||
|
||||
# Only check content if status code is valid
|
||||
try:
|
||||
|
|
@ -505,7 +543,7 @@ def validate_stream_url(url, user_agent=None, timeout=(5, 5)):
|
|||
get_response.close()
|
||||
|
||||
# If we have content, consider it valid even with unrecognized content type
|
||||
return is_valid, get_response.url, get_response.status_code, message
|
||||
return is_valid, url, get_response.status_code, message
|
||||
|
||||
except requests.exceptions.Timeout:
|
||||
return False, url, 0, "Timeout connecting to stream"
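
# Illustrative sketch, not part of the diff: the change above makes HEAD failures non-fatal
# and falls through to a streaming GET. The overall shape, simplified and with assumed
# names, looks like this:
import requests

def probe_url(session, url, timeout=(5, 5)):
    try:
        head = session.head(url, timeout=timeout, allow_redirects=True)
        if 200 <= head.status_code < 300:
            return True, head.status_code, "Valid (HEAD request)"
    except requests.exceptions.RequestException:
        pass  # many servers reject HEAD; fall through to GET
    get = session.get(url, timeout=timeout, stream=True, allow_redirects=True)
    try:
        return 200 <= get.status_code < 300, get.status_code, "Checked via GET"
    finally:
        get.close()
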
|
||||
|
|
|
|||
|
|
@ -97,7 +97,11 @@ class PersistentVODConnection:
|
|||
# First check if we have a pre-stored content length from HEAD request
|
||||
try:
|
||||
import redis
|
||||
r = redis.StrictRedis(host='localhost', port=6379, db=0, decode_responses=True)
|
||||
from django.conf import settings
|
||||
redis_host = getattr(settings, 'REDIS_HOST', 'localhost')
|
||||
redis_port = int(getattr(settings, 'REDIS_PORT', 6379))
|
||||
redis_db = int(getattr(settings, 'REDIS_DB', 0))
|
||||
r = redis.StrictRedis(host=redis_host, port=redis_port, db=redis_db, decode_responses=True)
|
||||
content_length_key = f"vod_content_length:{self.session_id}"
|
||||
stored_length = r.get(content_length_key)
|
||||
if stored_length:
|
||||
|
|
|
|||
|
|
@ -24,6 +24,11 @@ from apps.m3u.models import M3UAccountProfile
|
|||
logger = logging.getLogger("vod_proxy")
|
||||
|
||||
|
||||
def get_vod_client_stop_key(client_id):
|
||||
"""Get the Redis key for signaling a VOD client to stop"""
|
||||
return f"vod_proxy:client:{client_id}:stop"
|
||||
|
||||
|
||||
def infer_content_type_from_url(url: str) -> Optional[str]:
|
||||
"""
|
||||
Infer MIME type from file extension in URL
|
||||
|
|
@ -352,12 +357,12 @@ class RedisBackedVODConnection:
|
|||
|
||||
logger.info(f"[{self.session_id}] Making request #{state.request_count} to {'final' if state.final_url else 'original'} URL")
|
||||
|
||||
# Make request
|
||||
# Make request (10s connect, 10s read timeout - keeps lock time reasonable if client disconnects)
|
||||
response = self.local_session.get(
|
||||
target_url,
|
||||
headers=headers,
|
||||
stream=True,
|
||||
timeout=(10, 30),
|
||||
timeout=(10, 10),
|
||||
allow_redirects=allow_redirects
|
||||
)
|
||||
response.raise_for_status()
|
||||
|
|
@ -707,6 +712,10 @@ class MultiWorkerVODConnectionManager:
|
|||
content_name = content_obj.name if hasattr(content_obj, 'name') else str(content_obj)
|
||||
client_id = session_id
|
||||
|
||||
# Track whether we incremented profile connections (for cleanup on error)
|
||||
profile_connections_incremented = False
|
||||
redis_connection = None
|
||||
|
||||
logger.info(f"[{client_id}] Worker {self.worker_id} - Redis-backed streaming request for {content_type} {content_name}")
|
||||
|
||||
try:
|
||||
|
|
@ -797,6 +806,7 @@ class MultiWorkerVODConnectionManager:
|
|||
|
||||
# Increment profile connections after successful connection creation
|
||||
self._increment_profile_connections(m3u_profile)
|
||||
profile_connections_incremented = True
|
||||
|
||||
logger.info(f"[{client_id}] Worker {self.worker_id} - Created consolidated connection with session metadata")
|
||||
else:
|
||||
|
|
@ -832,6 +842,7 @@ class MultiWorkerVODConnectionManager:
|
|||
# Create streaming generator
|
||||
def stream_generator():
|
||||
decremented = False
|
||||
stop_signal_detected = False
|
||||
try:
|
||||
logger.info(f"[{client_id}] Worker {self.worker_id} - Starting Redis-backed stream")
|
||||
|
||||
|
|
@ -846,14 +857,25 @@ class MultiWorkerVODConnectionManager:
|
|||
bytes_sent = 0
|
||||
chunk_count = 0
|
||||
|
||||
# Get the stop signal key for this client
|
||||
stop_key = get_vod_client_stop_key(client_id)
|
||||
|
||||
for chunk in upstream_response.iter_content(chunk_size=8192):
|
||||
if chunk:
|
||||
yield chunk
|
||||
bytes_sent += len(chunk)
|
||||
chunk_count += 1
|
||||
|
||||
# Update activity every 100 chunks in consolidated connection state
|
||||
# Check for stop signal every 100 chunks
|
||||
if chunk_count % 100 == 0:
|
||||
# Check if stop signal has been set
|
||||
if self.redis_client and self.redis_client.exists(stop_key):
|
||||
logger.info(f"[{client_id}] Worker {self.worker_id} - Stop signal detected, terminating stream")
|
||||
# Delete the stop key
|
||||
self.redis_client.delete(stop_key)
|
||||
stop_signal_detected = True
|
||||
break
|
||||
|
||||
# Update the connection state
|
||||
logger.debug(f"Client: [{client_id}] Worker: {self.worker_id} sent {chunk_count} chunks for VOD: {content_name}")
|
||||
if redis_connection._acquire_lock():
|
||||
|
|
@ -867,7 +889,10 @@ class MultiWorkerVODConnectionManager:
|
|||
finally:
|
||||
redis_connection._release_lock()
|
||||
|
||||
logger.info(f"[{client_id}] Worker {self.worker_id} - Redis-backed stream completed: {bytes_sent} bytes sent")
|
||||
if stop_signal_detected:
|
||||
logger.info(f"[{client_id}] Worker {self.worker_id} - Stream stopped by signal: {bytes_sent} bytes sent")
|
||||
else:
|
||||
logger.info(f"[{client_id}] Worker {self.worker_id} - Redis-backed stream completed: {bytes_sent} bytes sent")
|
||||
redis_connection.decrement_active_streams()
|
||||
decremented = True
|
||||
|
||||
|
|
@ -1004,6 +1029,19 @@ class MultiWorkerVODConnectionManager:
|
|||
|
||||
except Exception as e:
|
||||
logger.error(f"[{client_id}] Worker {self.worker_id} - Error in Redis-backed stream_content_with_session: {e}", exc_info=True)
|
||||
|
||||
# Decrement profile connections if we incremented them but failed before streaming started
|
||||
if profile_connections_incremented:
|
||||
logger.info(f"[{client_id}] Connection error occurred after profile increment - decrementing profile connections")
|
||||
self._decrement_profile_connections(m3u_profile.id)
|
||||
|
||||
# Also clean up the Redis connection state since we won't be using it
|
||||
if redis_connection:
|
||||
try:
|
||||
redis_connection.cleanup(connection_manager=self, current_worker_id=self.worker_id)
|
||||
except Exception as cleanup_error:
|
||||
logger.error(f"[{client_id}] Error during cleanup after connection failure: {cleanup_error}")
|
||||
|
||||
return HttpResponse(f"Streaming error: {str(e)}", status=500)
|
||||
|
||||
def _apply_timeshift_parameters(self, original_url, utc_start=None, utc_end=None, offset=None):
|
||||
|
|
|
|||
|
|
@ -21,4 +21,7 @@ urlpatterns = [
|
|||
|
||||
# VOD Stats
|
||||
path('stats/', views.VODStatsView.as_view(), name='vod_stats'),
|
||||
|
||||
# Stop VOD client connection
|
||||
path('stop_client/', views.stop_vod_client, name='stop_vod_client'),
|
||||
]
|
||||
|
|
|
|||
|
|
@ -15,7 +15,7 @@ from django.views import View
|
|||
from apps.vod.models import Movie, Series, Episode
|
||||
from apps.m3u.models import M3UAccount, M3UAccountProfile
|
||||
from apps.proxy.vod_proxy.connection_manager import VODConnectionManager
|
||||
from apps.proxy.vod_proxy.multi_worker_connection_manager import MultiWorkerVODConnectionManager, infer_content_type_from_url
|
||||
from apps.proxy.vod_proxy.multi_worker_connection_manager import MultiWorkerVODConnectionManager, infer_content_type_from_url, get_vod_client_stop_key
|
||||
from .utils import get_client_info, create_vod_response
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
|
@ -329,7 +329,11 @@ class VODStreamView(View):
|
|||
# Store the total content length in Redis for the persistent connection to use
|
||||
try:
|
||||
import redis
|
||||
r = redis.StrictRedis(host='localhost', port=6379, db=0, decode_responses=True)
|
||||
from django.conf import settings
|
||||
redis_host = getattr(settings, 'REDIS_HOST', 'localhost')
|
||||
redis_port = int(getattr(settings, 'REDIS_PORT', 6379))
|
||||
redis_db = int(getattr(settings, 'REDIS_DB', 0))
|
||||
r = redis.StrictRedis(host=redis_host, port=redis_port, db=redis_db, decode_responses=True)
|
||||
content_length_key = f"vod_content_length:{session_id}"
|
||||
r.set(content_length_key, total_size, ex=1800) # Store for 30 minutes
|
||||
logger.info(f"[VOD-HEAD] Stored total content length {total_size} for session {session_id}")
|
||||
|
|
@ -1011,3 +1015,59 @@ class VODStatsView(View):
|
|||
except Exception as e:
|
||||
logger.error(f"Error getting VOD stats: {e}")
|
||||
return JsonResponse({'error': str(e)}, status=500)
|
||||
|
||||
|
||||
from rest_framework.decorators import api_view, permission_classes
|
||||
from apps.accounts.permissions import IsAdmin
|
||||
|
||||
|
||||
@csrf_exempt
|
||||
@api_view(["POST"])
|
||||
@permission_classes([IsAdmin])
|
||||
def stop_vod_client(request):
|
||||
"""Stop a specific VOD client connection using stop signal mechanism"""
|
||||
try:
|
||||
# Parse request body
|
||||
import json
|
||||
try:
|
||||
data = json.loads(request.body)
|
||||
except json.JSONDecodeError:
|
||||
return JsonResponse({'error': 'Invalid JSON'}, status=400)
|
||||
|
||||
client_id = data.get('client_id')
|
||||
if not client_id:
|
||||
return JsonResponse({'error': 'No client_id provided'}, status=400)
|
||||
|
||||
logger.info(f"Request to stop VOD client: {client_id}")
|
||||
|
||||
# Get Redis client
|
||||
connection_manager = MultiWorkerVODConnectionManager.get_instance()
|
||||
redis_client = connection_manager.redis_client
|
||||
|
||||
if not redis_client:
|
||||
return JsonResponse({'error': 'Redis not available'}, status=500)
|
||||
|
||||
# Check if connection exists
|
||||
connection_key = f"vod_persistent_connection:{client_id}"
|
||||
connection_data = redis_client.hgetall(connection_key)
|
||||
if not connection_data:
|
||||
logger.warning(f"VOD connection not found: {client_id}")
|
||||
return JsonResponse({'error': 'Connection not found'}, status=404)
|
||||
|
||||
# Set a stop signal key that the worker will check
|
||||
stop_key = get_vod_client_stop_key(client_id)
|
||||
redis_client.setex(stop_key, 60, "true") # 60 second TTL
|
||||
|
||||
logger.info(f"Set stop signal for VOD client: {client_id}")
|
||||
|
||||
return JsonResponse({
|
||||
'message': 'VOD client stop signal sent',
|
||||
'client_id': client_id,
|
||||
'stop_key': stop_key
|
||||
})
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Error stopping VOD client: {e}", exc_info=True)
|
||||
return JsonResponse({'error': str(e)}, status=500)
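
# Illustrative sketch, not part of the diff: the stop-signal round trip shown above, in one
# place. Key layout and TTL come from the diff; the client_id value is hypothetical and
# redis_client is the connection manager's Redis client.
stop_key = get_vod_client_stop_key("abc123")     # "vod_proxy:client:abc123:stop"
redis_client.setex(stop_key, 60, "true")         # admin side: request a stop (60 second TTL)

if redis_client.exists(stop_key):                # streaming side: checked every 100 chunks
    redis_client.delete(stop_key)
    # break out of the chunk loop and let cleanup decrement the counters
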
|
||||
|
||||
|
||||
|
|
|
|||
|
|
@ -18,7 +18,7 @@ from apps.accounts.permissions import (
|
|||
)
|
||||
from .models import (
|
||||
Series, VODCategory, Movie, Episode, VODLogo,
|
||||
M3USeriesRelation, M3UMovieRelation, M3UEpisodeRelation
|
||||
M3USeriesRelation, M3UMovieRelation, M3UEpisodeRelation, M3UVODCategoryRelation
|
||||
)
|
||||
from .serializers import (
|
||||
MovieSerializer,
|
||||
|
|
@ -62,7 +62,7 @@ class MovieFilter(django_filters.FilterSet):
|
|||
|
||||
# Handle the format 'category_name|category_type'
|
||||
if '|' in value:
|
||||
category_name, category_type = value.split('|', 1)
|
||||
category_name, category_type = value.rsplit('|', 1)
|
||||
return queryset.filter(
|
||||
m3u_relations__category__name=category_name,
|
||||
m3u_relations__category__category_type=category_type
|
||||
|
|
@ -219,7 +219,7 @@ class SeriesFilter(django_filters.FilterSet):
|
|||
|
||||
# Handle the format 'category_name|category_type'
|
||||
if '|' in value:
|
||||
category_name, category_type = value.split('|', 1)
|
||||
category_name, category_type = value.rsplit('|', 1)
|
||||
return queryset.filter(
|
||||
m3u_relations__category__name=category_name,
|
||||
m3u_relations__category__category_type=category_type
|
||||
|
|
@ -476,6 +476,59 @@ class VODCategoryViewSet(viewsets.ReadOnlyModelViewSet):
|
|||
except KeyError:
|
||||
return [Authenticated()]
|
||||
|
||||
def list(self, request, *args, **kwargs):
|
||||
"""Override list to ensure Uncategorized categories and relations exist for all XC accounts with VOD enabled"""
|
||||
from apps.m3u.models import M3UAccount
|
||||
|
||||
# Ensure Uncategorized categories exist
|
||||
movie_category, _ = VODCategory.objects.get_or_create(
|
||||
name="Uncategorized",
|
||||
category_type="movie",
|
||||
defaults={}
|
||||
)
|
||||
|
||||
series_category, _ = VODCategory.objects.get_or_create(
|
||||
name="Uncategorized",
|
||||
category_type="series",
|
||||
defaults={}
|
||||
)
|
||||
|
||||
# Get all active XC accounts with VOD enabled
|
||||
xc_accounts = M3UAccount.objects.filter(
|
||||
account_type=M3UAccount.Types.XC,
|
||||
is_active=True
|
||||
)
|
||||
|
||||
for account in xc_accounts:
|
||||
if account.custom_properties:
|
||||
custom_props = account.custom_properties or {}
|
||||
vod_enabled = custom_props.get("enable_vod", False)
|
||||
|
||||
if vod_enabled:
|
||||
# Ensure relations exist for this account
|
||||
auto_enable_new = custom_props.get("auto_enable_new_groups_vod", True)
|
||||
|
||||
M3UVODCategoryRelation.objects.get_or_create(
|
||||
category=movie_category,
|
||||
m3u_account=account,
|
||||
defaults={
|
||||
'enabled': auto_enable_new,
|
||||
'custom_properties': {}
|
||||
}
|
||||
)
|
||||
|
||||
M3UVODCategoryRelation.objects.get_or_create(
|
||||
category=series_category,
|
||||
m3u_account=account,
|
||||
defaults={
|
||||
'enabled': auto_enable_new,
|
||||
'custom_properties': {}
|
||||
}
|
||||
)
|
||||
|
||||
# Now proceed with normal list operation
|
||||
return super().list(request, *args, **kwargs)
|
||||
|
||||
|
||||
class UnifiedContentViewSet(viewsets.ReadOnlyModelViewSet):
|
||||
"""ViewSet that combines Movies and Series for unified 'All' view"""
|
||||
|
|
@ -535,7 +588,7 @@ class UnifiedContentViewSet(viewsets.ReadOnlyModelViewSet):
|
|||
|
||||
if category:
|
||||
if '|' in category:
|
||||
cat_name, cat_type = category.split('|', 1)
|
||||
cat_name, cat_type = category.rsplit('|', 1)
|
||||
if cat_type == 'movie':
|
||||
where_conditions[0] += " AND movies.id IN (SELECT movie_id FROM vod_m3umovierelation mmr JOIN vod_vodcategory c ON mmr.category_id = c.id WHERE c.name = %s)"
|
||||
where_conditions[1] = "1=0" # Exclude series
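
# Illustrative sketch, not part of the diff: rsplit matters because category names may
# themselves contain '|'. With a hypothetical category "Kids|Family" of type "movie":
value = "Kids|Family|movie"
value.split('|', 1)    # ['Kids', 'Family|movie']  -> wrong name/type pair
value.rsplit('|', 1)   # ['Kids|Family', 'movie']  -> name and type split correctly
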
|
||||
|
|
|
|||
|
|
@ -245,10 +245,13 @@ class M3UMovieRelation(models.Model):
|
|||
"""Get the full stream URL for this movie from this provider"""
|
||||
# Build URL dynamically for XtreamCodes accounts
|
||||
if self.m3u_account.account_type == 'XC':
|
||||
server_url = self.m3u_account.server_url.rstrip('/')
|
||||
from core.xtream_codes import Client as XCClient
|
||||
# Use XC client's URL normalization to handle malformed URLs
|
||||
# (e.g., URLs with /player_api.php or query parameters)
|
||||
normalized_url = XCClient(self.m3u_account.server_url, '', '')._normalize_url(self.m3u_account.server_url)
|
||||
username = self.m3u_account.username
|
||||
password = self.m3u_account.password
|
||||
return f"{server_url}/movie/{username}/{password}/{self.stream_id}.{self.container_extension or 'mp4'}"
|
||||
return f"{normalized_url}/movie/{username}/{password}/{self.stream_id}.{self.container_extension or 'mp4'}"
|
||||
else:
|
||||
# For other account types, we would need another way to build URLs
|
||||
return None
|
||||
|
|
@ -285,10 +288,12 @@ class M3UEpisodeRelation(models.Model):
|
|||
|
||||
if self.m3u_account.account_type == 'XC':
|
||||
# For XtreamCodes accounts, build the URL dynamically
|
||||
server_url = self.m3u_account.server_url.rstrip('/')
|
||||
# Use XC client's URL normalization to handle malformed URLs
|
||||
# (e.g., URLs with /player_api.php or query parameters)
|
||||
normalized_url = XtreamCodesClient(self.m3u_account.server_url, '', '')._normalize_url(self.m3u_account.server_url)
|
||||
username = self.m3u_account.username
|
||||
password = self.m3u_account.password
|
||||
return f"{server_url}/series/{username}/{password}/{self.stream_id}.{self.container_extension or 'mp4'}"
|
||||
return f"{normalized_url}/series/{username}/{password}/{self.stream_id}.{self.container_extension or 'mp4'}"
|
||||
else:
|
||||
# We might support non XC accounts in the future
|
||||
# For now, return None
|
||||
|
|
|
|||
|
|
@ -127,6 +127,37 @@ def refresh_movies(client, account, categories_by_provider, relations, scan_star
|
|||
"""Refresh movie content using single API call for all movies"""
|
||||
logger.info(f"Refreshing movies for account {account.name}")
|
||||
|
||||
# Ensure "Uncategorized" category exists for movies without a category
|
||||
uncategorized_category, created = VODCategory.objects.get_or_create(
|
||||
name="Uncategorized",
|
||||
category_type="movie",
|
||||
defaults={}
|
||||
)
|
||||
|
||||
# Ensure there's a relation for the Uncategorized category
|
||||
account_custom_props = account.custom_properties or {}
|
||||
auto_enable_new = account_custom_props.get("auto_enable_new_groups_vod", True)
|
||||
|
||||
uncategorized_relation, rel_created = M3UVODCategoryRelation.objects.get_or_create(
|
||||
category=uncategorized_category,
|
||||
m3u_account=account,
|
||||
defaults={
|
||||
'enabled': auto_enable_new,
|
||||
'custom_properties': {}
|
||||
}
|
||||
)
|
||||
|
||||
if created:
|
||||
logger.info(f"Created 'Uncategorized' category for movies")
|
||||
if rel_created:
|
||||
logger.info(f"Created relation for 'Uncategorized' category (enabled={auto_enable_new})")
|
||||
|
||||
# Add uncategorized category to relations dict for easy access
|
||||
relations[uncategorized_category.id] = uncategorized_relation
|
||||
|
||||
# Add to categories_by_provider with a special key for items without category
|
||||
categories_by_provider['__uncategorized__'] = uncategorized_category
|
||||
|
||||
# Get all movies in a single API call
|
||||
logger.info("Fetching all movies from provider...")
|
||||
all_movies_data = client.get_vod_streams() # No category_id = get all movies
|
||||
|
|
@ -150,6 +181,37 @@ def refresh_series(client, account, categories_by_provider, relations, scan_star
|
|||
"""Refresh series content using single API call for all series"""
|
||||
logger.info(f"Refreshing series for account {account.name}")
|
||||
|
||||
# Ensure "Uncategorized" category exists for series without a category
|
||||
uncategorized_category, created = VODCategory.objects.get_or_create(
|
||||
name="Uncategorized",
|
||||
category_type="series",
|
||||
defaults={}
|
||||
)
|
||||
|
||||
# Ensure there's a relation for the Uncategorized category
|
||||
account_custom_props = account.custom_properties or {}
|
||||
auto_enable_new = account_custom_props.get("auto_enable_new_groups_series", True)
|
||||
|
||||
uncategorized_relation, rel_created = M3UVODCategoryRelation.objects.get_or_create(
|
||||
category=uncategorized_category,
|
||||
m3u_account=account,
|
||||
defaults={
|
||||
'enabled': auto_enable_new,
|
||||
'custom_properties': {}
|
||||
}
|
||||
)
|
||||
|
||||
if created:
|
||||
logger.info(f"Created 'Uncategorized' category for series")
|
||||
if rel_created:
|
||||
logger.info(f"Created relation for 'Uncategorized' category (enabled={auto_enable_new})")
|
||||
|
||||
# Add uncategorized category to relations dict for easy access
|
||||
relations[uncategorized_category.id] = uncategorized_relation
|
||||
|
||||
# Add to categories_by_provider with a special key for items without category
|
||||
categories_by_provider['__uncategorized__'] = uncategorized_category
|
||||
|
||||
# Get all series in a single API call
|
||||
logger.info("Fetching all series from provider...")
|
||||
all_series_data = client.get_series() # No category_id = get all series
|
||||
|
|
@ -240,6 +302,7 @@ def batch_create_categories(categories_data, category_type, account):
|
|||
M3UVODCategoryRelation.objects.bulk_create(relations_to_create, ignore_conflicts=True)
|
||||
|
||||
# Delete orphaned category relationships (categories no longer in the M3U source)
|
||||
# Exclude "Uncategorized" from cleanup as it's a special category we manage
|
||||
current_category_ids = set(existing_categories[name].id for name in category_names)
|
||||
existing_relations = M3UVODCategoryRelation.objects.filter(
|
||||
m3u_account=account,
|
||||
|
|
@ -248,7 +311,7 @@ def batch_create_categories(categories_data, category_type, account):
|
|||
|
||||
relations_to_delete = [
|
||||
rel for rel in existing_relations
|
||||
if rel.category_id not in current_category_ids
|
||||
if rel.category_id not in current_category_ids and rel.category.name != "Uncategorized"
|
||||
]
|
||||
|
||||
if relations_to_delete:
|
||||
|
|
@ -331,17 +394,26 @@ def process_movie_batch(account, batch, categories, relations, scan_start_time=N
|
|||
logger.debug("Skipping disabled category")
|
||||
continue
|
||||
else:
|
||||
logger.warning(f"No category ID provided for movie {name}")
|
||||
# Assign to Uncategorized category if no category_id provided
|
||||
logger.debug(f"No category ID provided for movie {name}, assigning to 'Uncategorized'")
|
||||
category = categories.get('__uncategorized__')
|
||||
if category:
|
||||
movie_data['_category_id'] = category.id
|
||||
# Check if uncategorized is disabled
|
||||
relation = relations.get(category.id, None)
|
||||
if relation and not relation.enabled:
|
||||
logger.debug("Skipping disabled 'Uncategorized' category")
|
||||
continue
|
||||
|
||||
# Extract metadata
|
||||
year = extract_year_from_data(movie_data, 'name')
|
||||
tmdb_id = movie_data.get('tmdb_id') or movie_data.get('tmdb')
|
||||
imdb_id = movie_data.get('imdb_id') or movie_data.get('imdb')
|
||||
|
||||
# Clean empty string IDs
|
||||
if tmdb_id == '':
|
||||
# Clean empty string IDs and zero values (some providers use 0 to indicate no ID)
|
||||
if tmdb_id == '' or tmdb_id == 0 or tmdb_id == '0':
|
||||
tmdb_id = None
|
||||
if imdb_id == '':
|
||||
if imdb_id == '' or imdb_id == 0 or imdb_id == '0':
|
||||
imdb_id = None
|
||||
|
||||
# Create a unique key for this movie (priority: TMDB > IMDB > name+year)
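
# Illustrative sketch, not part of the diff: the cleaning above treats '', 0 and '0' as
# "no external ID". A tiny helper (name is an assumption) expressing the same rule:
def normalize_external_id(value):
    return None if value in ('', 0, '0') else value

normalize_external_id('0')      # None
normalize_external_id(12345)    # 12345
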
|
||||
|
|
@ -542,26 +614,41 @@ def process_movie_batch(account, batch, categories, relations, scan_start_time=N
|
|||
# First, create new movies and get their IDs
|
||||
created_movies = {}
|
||||
if movies_to_create:
|
||||
Movie.objects.bulk_create(movies_to_create, ignore_conflicts=True)
|
||||
# Bulk query to check which movies already exist
|
||||
tmdb_ids = [m.tmdb_id for m in movies_to_create if m.tmdb_id]
|
||||
imdb_ids = [m.imdb_id for m in movies_to_create if m.imdb_id]
|
||||
name_year_pairs = [(m.name, m.year) for m in movies_to_create if not m.tmdb_id and not m.imdb_id]
|
||||
|
||||
# Get the newly created movies with their IDs
|
||||
# We need to re-fetch them to get the primary keys
|
||||
existing_by_tmdb = {m.tmdb_id: m for m in Movie.objects.filter(tmdb_id__in=tmdb_ids)} if tmdb_ids else {}
|
||||
existing_by_imdb = {m.imdb_id: m for m in Movie.objects.filter(imdb_id__in=imdb_ids)} if imdb_ids else {}
|
||||
|
||||
existing_by_name_year = {}
|
||||
if name_year_pairs:
|
||||
for movie in Movie.objects.filter(tmdb_id__isnull=True, imdb_id__isnull=True):
|
||||
key = (movie.name, movie.year)
|
||||
if key in name_year_pairs:
|
||||
existing_by_name_year[key] = movie
|
||||
|
||||
# Check each movie against the bulk query results
|
||||
movies_actually_created = []
|
||||
for movie in movies_to_create:
|
||||
# Find the movie by its unique identifiers
|
||||
if movie.tmdb_id:
|
||||
db_movie = Movie.objects.filter(tmdb_id=movie.tmdb_id).first()
|
||||
elif movie.imdb_id:
|
||||
db_movie = Movie.objects.filter(imdb_id=movie.imdb_id).first()
|
||||
else:
|
||||
db_movie = Movie.objects.filter(
|
||||
name=movie.name,
|
||||
year=movie.year,
|
||||
tmdb_id__isnull=True,
|
||||
imdb_id__isnull=True
|
||||
).first()
|
||||
existing = None
|
||||
if movie.tmdb_id and movie.tmdb_id in existing_by_tmdb:
|
||||
existing = existing_by_tmdb[movie.tmdb_id]
|
||||
elif movie.imdb_id and movie.imdb_id in existing_by_imdb:
|
||||
existing = existing_by_imdb[movie.imdb_id]
|
||||
elif not movie.tmdb_id and not movie.imdb_id:
|
||||
existing = existing_by_name_year.get((movie.name, movie.year))
|
||||
|
||||
if db_movie:
|
||||
created_movies[id(movie)] = db_movie
|
||||
if existing:
|
||||
created_movies[id(movie)] = existing
|
||||
else:
|
||||
movies_actually_created.append(movie)
|
||||
created_movies[id(movie)] = movie
|
||||
|
||||
# Bulk create only movies that don't exist
|
||||
if movies_actually_created:
|
||||
Movie.objects.bulk_create(movies_actually_created)
|
||||
|
||||
# Update existing movies
|
||||
if movies_to_update:
|
||||
|
|
@ -577,12 +664,16 @@ def process_movie_batch(account, batch, categories, relations, scan_start_time=N
|
|||
movie.logo = movie._logo_to_update
|
||||
movie.save(update_fields=['logo'])
|
||||
|
||||
# Update relations to reference the correct movie objects
|
||||
# Update relations to reference the correct movie objects (with PKs)
|
||||
for relation in relations_to_create:
|
||||
if id(relation.movie) in created_movies:
|
||||
relation.movie = created_movies[id(relation.movie)]
|
||||
|
||||
# Handle relations
|
||||
for relation in relations_to_update:
|
||||
if id(relation.movie) in created_movies:
|
||||
relation.movie = created_movies[id(relation.movie)]
|
||||
|
||||
# All movies now have PKs, safe to bulk create/update relations
|
||||
if relations_to_create:
|
||||
M3UMovieRelation.objects.bulk_create(relations_to_create, ignore_conflicts=True)
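
# Illustrative sketch, not part of the diff: the create path above in miniature. Prefetch
# existing rows by external ID, bulk-create only the rest, then re-point relations at
# objects that actually have primary keys before bulk-creating the relations. Only the
# TMDB branch is shown; the diff also matches by IMDB ID and by name plus year.
existing_by_tmdb = {m.tmdb_id: m for m in Movie.objects.filter(tmdb_id__in=tmdb_ids)}
to_create, resolved = [], {}
for movie in movies_to_create:
    match = existing_by_tmdb.get(movie.tmdb_id) if movie.tmdb_id else None
    if match:
        resolved[id(movie)] = match          # reuse the row already in the database
    else:
        to_create.append(movie)
        resolved[id(movie)] = movie          # gets a primary key from bulk_create below
Movie.objects.bulk_create(to_create)
for relation in relations_to_create:
    relation.movie = resolved[id(relation.movie)]
M3UMovieRelation.objects.bulk_create(relations_to_create, ignore_conflicts=True)
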
|
||||
|
||||
|
|
@ -633,7 +724,16 @@ def process_series_batch(account, batch, categories, relations, scan_start_time=
|
|||
logger.debug("Skipping disabled category")
|
||||
continue
|
||||
else:
|
||||
logger.warning(f"No category ID provided for series {name}")
|
||||
# Assign to Uncategorized category if no category_id provided
|
||||
logger.debug(f"No category ID provided for series {name}, assigning to 'Uncategorized'")
|
||||
category = categories.get('__uncategorized__')
|
||||
if category:
|
||||
series_data['_category_id'] = category.id
|
||||
# Check if uncategorized is disabled
|
||||
relation = relations.get(category.id, None)
|
||||
if relation and not relation.enabled:
|
||||
logger.debug("Skipping disabled 'Uncategorized' category")
|
||||
continue
|
||||
|
||||
# Extract metadata
|
||||
year = extract_year(series_data.get('releaseDate', ''))
|
||||
|
|
@ -643,10 +743,10 @@ def process_series_batch(account, batch, categories, relations, scan_start_time=
|
|||
tmdb_id = series_data.get('tmdb') or series_data.get('tmdb_id')
|
||||
imdb_id = series_data.get('imdb') or series_data.get('imdb_id')
|
||||
|
||||
# Clean empty string IDs
|
||||
if tmdb_id == '':
|
||||
# Clean empty string IDs and zero values (some providers use 0 to indicate no ID)
|
||||
if tmdb_id == '' or tmdb_id == 0 or tmdb_id == '0':
|
||||
tmdb_id = None
|
||||
if imdb_id == '':
|
||||
if imdb_id == '' or imdb_id == 0 or imdb_id == '0':
|
||||
imdb_id = None
|
||||
|
||||
# Create a unique key for this series (priority: TMDB > IMDB > name+year)
|
||||
|
|
@ -864,26 +964,41 @@ def process_series_batch(account, batch, categories, relations, scan_start_time=
|
|||
# First, create new series and get their IDs
|
||||
created_series = {}
|
||||
if series_to_create:
|
||||
Series.objects.bulk_create(series_to_create, ignore_conflicts=True)
|
||||
# Bulk query to check which series already exist
|
||||
tmdb_ids = [s.tmdb_id for s in series_to_create if s.tmdb_id]
|
||||
imdb_ids = [s.imdb_id for s in series_to_create if s.imdb_id]
|
||||
name_year_pairs = [(s.name, s.year) for s in series_to_create if not s.tmdb_id and not s.imdb_id]
|
||||
|
||||
# Get the newly created series with their IDs
|
||||
# We need to re-fetch them to get the primary keys
|
||||
existing_by_tmdb = {s.tmdb_id: s for s in Series.objects.filter(tmdb_id__in=tmdb_ids)} if tmdb_ids else {}
|
||||
existing_by_imdb = {s.imdb_id: s for s in Series.objects.filter(imdb_id__in=imdb_ids)} if imdb_ids else {}
|
||||
|
||||
existing_by_name_year = {}
|
||||
if name_year_pairs:
|
||||
for series in Series.objects.filter(tmdb_id__isnull=True, imdb_id__isnull=True):
|
||||
key = (series.name, series.year)
|
||||
if key in name_year_pairs:
|
||||
existing_by_name_year[key] = series
|
||||
|
||||
# Check each series against the bulk query results
|
||||
series_actually_created = []
|
||||
for series in series_to_create:
|
||||
# Find the series by its unique identifiers
|
||||
if series.tmdb_id:
|
||||
db_series = Series.objects.filter(tmdb_id=series.tmdb_id).first()
|
||||
elif series.imdb_id:
|
||||
db_series = Series.objects.filter(imdb_id=series.imdb_id).first()
|
||||
else:
|
||||
db_series = Series.objects.filter(
|
||||
name=series.name,
|
||||
year=series.year,
|
||||
tmdb_id__isnull=True,
|
||||
imdb_id__isnull=True
|
||||
).first()
|
||||
existing = None
|
||||
if series.tmdb_id and series.tmdb_id in existing_by_tmdb:
|
||||
existing = existing_by_tmdb[series.tmdb_id]
|
||||
elif series.imdb_id and series.imdb_id in existing_by_imdb:
|
||||
existing = existing_by_imdb[series.imdb_id]
|
||||
elif not series.tmdb_id and not series.imdb_id:
|
||||
existing = existing_by_name_year.get((series.name, series.year))
|
||||
|
||||
if db_series:
|
||||
created_series[id(series)] = db_series
|
||||
if existing:
|
||||
created_series[id(series)] = existing
|
||||
else:
|
||||
series_actually_created.append(series)
|
||||
created_series[id(series)] = series
|
||||
|
||||
# Bulk create only series that don't exist
|
||||
if series_actually_created:
|
||||
Series.objects.bulk_create(series_actually_created)
|
||||
|
||||
# Update existing series
|
||||
if series_to_update:
|
||||
|
|
@ -899,12 +1014,16 @@ def process_series_batch(account, batch, categories, relations, scan_start_time=
|
|||
series.logo = series._logo_to_update
|
||||
series.save(update_fields=['logo'])
|
||||
|
||||
# Update relations to reference the correct series objects
|
||||
# Update relations to reference the correct series objects (with PKs)
|
||||
for relation in relations_to_create:
|
||||
if id(relation.series) in created_series:
|
||||
relation.series = created_series[id(relation.series)]
|
||||
|
||||
# Handle relations
|
||||
for relation in relations_to_update:
|
||||
if id(relation.series) in created_series:
|
||||
relation.series = created_series[id(relation.series)]
|
||||
|
||||
# All series now have PKs, safe to bulk create/update relations
|
||||
if relations_to_create:
|
||||
M3USeriesRelation.objects.bulk_create(relations_to_create, ignore_conflicts=True)
|
||||
|
||||
|
|
@ -1151,7 +1270,13 @@ def refresh_series_episodes(account, series, external_series_id, episodes_data=N
|
|||
|
||||
|
||||
def batch_process_episodes(account, series, episodes_data, scan_start_time=None):
|
||||
"""Process episodes in batches for better performance"""
|
||||
"""Process episodes in batches for better performance.
|
||||
|
||||
Note: Multiple streams can represent the same episode (e.g., different languages
|
||||
or qualities). Each stream has a unique stream_id, but they share the same
|
||||
season/episode number. We create one Episode record per (series, season, episode)
|
||||
and multiple M3UEpisodeRelation records pointing to it.
|
||||
"""
|
||||
if not episodes_data:
|
||||
return
|
||||
|
||||
|
|
@ -1168,12 +1293,13 @@ def batch_process_episodes(account, series, episodes_data, scan_start_time=None)
|
|||
logger.info(f"Batch processing {len(all_episodes_data)} episodes for series {series.name}")
|
||||
|
||||
# Extract episode identifiers
|
||||
episode_keys = []
|
||||
# Note: episode_keys may have duplicates when multiple streams represent same episode
|
||||
episode_keys = set() # Use set to track unique episode keys
|
||||
episode_ids = []
|
||||
for episode_data in all_episodes_data:
|
||||
season_num = episode_data['_season_number']
|
||||
episode_num = episode_data.get('episode_num', 0)
|
||||
episode_keys.append((series.id, season_num, episode_num))
|
||||
episode_keys.add((series.id, season_num, episode_num))
|
||||
episode_ids.append(str(episode_data.get('id')))
|
||||
|
||||
# Pre-fetch existing episodes
|
||||
|
|
@ -1196,12 +1322,25 @@ def batch_process_episodes(account, series, episodes_data, scan_start_time=None)
|
|||
relations_to_create = []
|
||||
relations_to_update = []
|
||||
|
||||
# Track episodes we're creating in this batch to avoid duplicates
|
||||
# Key: (series_id, season_number, episode_number) -> Episode object
|
||||
episodes_pending_creation = {}
|
||||
|
||||
for episode_data in all_episodes_data:
|
||||
try:
|
||||
episode_id = str(episode_data.get('id'))
|
||||
episode_name = episode_data.get('title', 'Unknown Episode')
|
||||
season_number = episode_data['_season_number']
|
||||
episode_number = episode_data.get('episode_num', 0)
|
||||
# Ensure season and episode numbers are integers (API may return strings)
|
||||
try:
|
||||
season_number = int(episode_data['_season_number'])
|
||||
except (ValueError, TypeError) as e:
|
||||
logger.warning(f"Invalid season_number '{episode_data.get('_season_number')}' for episode '{episode_name}': {e}")
|
||||
season_number = 0
|
||||
try:
|
||||
episode_number = int(episode_data.get('episode_num', 0))
|
||||
except (ValueError, TypeError) as e:
|
||||
logger.warning(f"Invalid episode_num '{episode_data.get('episode_num')}' for episode '{episode_name}': {e}")
|
||||
episode_number = 0
|
||||
info = episode_data.get('info', {})
|
||||
|
||||
# Extract episode metadata
|
||||
|
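The coercion above guards against providers that send season/episode numbers as strings or junk. The same guard can be factored into a tiny helper; this is a generic sketch, not code from the patch:

def to_int(value, default=0):
    """Coerce provider-supplied values like '03', 3, or None to an int."""
    try:
        return int(value)
    except (ValueError, TypeError):
        return default

print(to_int("03"), to_int(None), to_int("N/A", default=0))  # 3 0 0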
|
@ -1225,10 +1364,15 @@ def batch_process_episodes(account, series, episodes_data, scan_start_time=None)
|
|||
if backdrop:
|
||||
custom_props['backdrop_path'] = [backdrop]
|
||||
|
||||
# Find existing episode
|
||||
# Find existing episode - check DB first, then pending creations
|
||||
episode_key = (series.id, season_number, episode_number)
|
||||
episode = existing_episodes.get(episode_key)
|
||||
|
||||
# Check if we already have this episode pending creation (multiple streams for same episode)
|
||||
if not episode and episode_key in episodes_pending_creation:
|
||||
episode = episodes_pending_creation[episode_key]
|
||||
logger.debug(f"Reusing pending episode for S{season_number}E{episode_number} (stream_id: {episode_id})")
|
||||
|
||||
if episode:
|
||||
# Update existing episode
|
||||
updated = False
|
||||
|
|
@ -1257,7 +1401,9 @@ def batch_process_episodes(account, series, episodes_data, scan_start_time=None)
|
|||
episode.custom_properties = custom_props if custom_props else None
|
||||
updated = True
|
||||
|
||||
if updated:
|
||||
# Only add to update list if episode has a PK (exists in DB) and isn't already in list
|
||||
# Episodes pending creation don't have PKs yet and will be created via bulk_create
|
||||
if updated and episode.pk and episode not in episodes_to_update:
|
||||
episodes_to_update.append(episode)
|
||||
else:
|
||||
# Create new episode
|
||||
|
|
@ -1275,6 +1421,8 @@ def batch_process_episodes(account, series, episodes_data, scan_start_time=None)
|
|||
custom_properties=custom_props if custom_props else None
|
||||
)
|
||||
episodes_to_create.append(episode)
|
||||
# Track this episode so subsequent streams with same season/episode can reuse it
|
||||
episodes_pending_creation[episode_key] = episode
|
||||
|
||||
# Handle episode relation
|
||||
if episode_id in existing_relations:
|
||||
|
|
@ -1308,9 +1456,43 @@ def batch_process_episodes(account, series, episodes_data, scan_start_time=None)
|
|||
|
||||
# Execute batch operations
|
||||
with transaction.atomic():
|
||||
# Create new episodes
|
||||
# Create new episodes - use ignore_conflicts in case of race conditions
|
||||
if episodes_to_create:
|
||||
Episode.objects.bulk_create(episodes_to_create)
|
||||
Episode.objects.bulk_create(episodes_to_create, ignore_conflicts=True)
|
||||
|
||||
# Re-fetch the created episodes to get their PKs
|
||||
# We need to do this because bulk_create with ignore_conflicts doesn't set PKs
|
||||
created_episode_keys = [
|
||||
(ep.series_id, ep.season_number, ep.episode_number)
|
||||
for ep in episodes_to_create
|
||||
]
|
||||
db_episodes = Episode.objects.filter(series=series)
|
||||
episode_pk_map = {
|
||||
(ep.series_id, ep.season_number, ep.episode_number): ep
|
||||
for ep in db_episodes
|
||||
}
|
||||
|
||||
# Update relations to point to the actual DB episodes with PKs
|
||||
for relation in relations_to_create:
|
||||
ep = relation.episode
|
||||
key = (ep.series_id, ep.season_number, ep.episode_number)
|
||||
if key in episode_pk_map:
|
||||
relation.episode = episode_pk_map[key]
|
||||
|
||||
# Filter out relations with unsaved episodes (no PK)
|
||||
# This can happen if bulk_create had a conflict and ignore_conflicts=True didn't save the episode
|
||||
valid_relations_to_create = []
|
||||
for relation in relations_to_create:
|
||||
if relation.episode.pk is not None:
|
||||
valid_relations_to_create.append(relation)
|
||||
else:
|
||||
season_num = relation.episode.season_number
|
||||
episode_num = relation.episode.episode_number
|
||||
logger.warning(
|
||||
f"Skipping relation for episode S{season_num}E{episode_num} "
|
||||
f"- episode not saved to database"
|
||||
)
|
||||
relations_to_create = valid_relations_to_create
|
||||
|
||||
# Update existing episodes
|
||||
if episodes_to_update:
|
||||
|
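Because bulk_create(..., ignore_conflicts=True) does not populate primary keys on the in-memory objects, the hunk above re-reads the rows and keys them by (series_id, season_number, episode_number) before repointing relations and dropping any that still lack a PK. A dict-based sketch of that natural-key remap (plain dicts stand in for model instances):

# Sketch: after a conflict-tolerant bulk insert, rebuild a natural-key map
# from what is actually in storage and repoint relations at those rows.
db_rows = [  # what a re-fetch from the database might return
    {"pk": 7, "series_id": 42, "season": 1, "episode": 2},
    {"pk": 8, "series_id": 42, "season": 1, "episode": 3},
]
by_key = {(r["series_id"], r["season"], r["episode"]): r for r in db_rows}

pending = [  # in-memory objects without PKs, as built before the insert
    {"pk": None, "series_id": 42, "season": 1, "episode": 2},
]
relations = [{"stream_id": "s1", "episode": pending[0]}]

for relation in relations:
    ep = relation["episode"]
    key = (ep["series_id"], ep["season"], ep["episode"])
    if key in by_key:
        relation["episode"] = by_key[key]

# Drop anything that still has no PK rather than failing the whole batch.
relations = [r for r in relations if r["episode"]["pk"] is not None]
print(relations[0]["episode"]["pk"])  # 7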
|
@ -1319,9 +1501,9 @@ def batch_process_episodes(account, series, episodes_data, scan_start_time=None)
|
|||
'tmdb_id', 'imdb_id', 'custom_properties'
|
||||
])
|
||||
|
||||
# Create new episode relations
|
||||
# Create new episode relations - use ignore_conflicts for stream_id duplicates
|
||||
if relations_to_create:
|
||||
M3UEpisodeRelation.objects.bulk_create(relations_to_create)
|
||||
M3UEpisodeRelation.objects.bulk_create(relations_to_create, ignore_conflicts=True)
|
||||
|
||||
# Update existing episode relations
|
||||
if relations_to_update:
|
||||
|
|
|
|||
|
|
@ -2,7 +2,16 @@
|
|||
|
||||
from django.urls import path, include
|
||||
from rest_framework.routers import DefaultRouter
|
||||
from .api_views import UserAgentViewSet, StreamProfileViewSet, CoreSettingsViewSet, environment, version, rehash_streams_endpoint, TimezoneListView
|
||||
from .api_views import (
|
||||
UserAgentViewSet,
|
||||
StreamProfileViewSet,
|
||||
CoreSettingsViewSet,
|
||||
environment,
|
||||
version,
|
||||
rehash_streams_endpoint,
|
||||
TimezoneListView,
|
||||
get_system_events
|
||||
)
|
||||
|
||||
router = DefaultRouter()
|
||||
router.register(r'useragents', UserAgentViewSet, basename='useragent')
|
||||
|
|
@ -13,5 +22,6 @@ urlpatterns = [
|
|||
path('version/', version, name='version'),
|
||||
path('rehash-streams/', rehash_streams_endpoint, name='rehash_streams'),
|
||||
path('timezones/', TimezoneListView.as_view(), name='timezones'),
|
||||
path('system-events/', get_system_events, name='system_events'),
|
||||
path('', include(router.urls)),
|
||||
]
|
||||
|
|
|
|||
|
|
@ -15,8 +15,9 @@ from .models import (
|
|||
UserAgent,
|
||||
StreamProfile,
|
||||
CoreSettings,
|
||||
STREAM_HASH_KEY,
|
||||
NETWORK_ACCESS,
|
||||
STREAM_SETTINGS_KEY,
|
||||
DVR_SETTINGS_KEY,
|
||||
NETWORK_ACCESS_KEY,
|
||||
PROXY_SETTINGS_KEY,
|
||||
)
|
||||
from .serializers import (
|
||||
|
|
@ -68,16 +69,28 @@ class CoreSettingsViewSet(viewsets.ModelViewSet):
|
|||
|
||||
def update(self, request, *args, **kwargs):
|
||||
instance = self.get_object()
|
||||
old_value = instance.value
|
||||
response = super().update(request, *args, **kwargs)
|
||||
if instance.key == STREAM_HASH_KEY:
|
||||
if instance.value != request.data["value"]:
|
||||
rehash_streams.delay(request.data["value"].split(","))
|
||||
|
||||
# If DVR pre/post offsets changed, reschedule upcoming recordings
|
||||
try:
|
||||
from core.models import DVR_PRE_OFFSET_MINUTES_KEY, DVR_POST_OFFSET_MINUTES_KEY
|
||||
if instance.key in (DVR_PRE_OFFSET_MINUTES_KEY, DVR_POST_OFFSET_MINUTES_KEY):
|
||||
if instance.value != request.data.get("value"):
|
||||
# If stream settings changed and m3u_hash_key is different, rehash streams
|
||||
if instance.key == STREAM_SETTINGS_KEY:
|
||||
new_value = request.data.get("value", {})
|
||||
if isinstance(new_value, dict) and isinstance(old_value, dict):
|
||||
old_hash = old_value.get("m3u_hash_key", "")
|
||||
new_hash = new_value.get("m3u_hash_key", "")
|
||||
if old_hash != new_hash:
|
||||
hash_keys = new_hash.split(",") if isinstance(new_hash, str) else new_hash
|
||||
rehash_streams.delay(hash_keys)
|
||||
|
||||
# If DVR settings changed and pre/post offsets are different, reschedule upcoming recordings
|
||||
if instance.key == DVR_SETTINGS_KEY:
|
||||
new_value = request.data.get("value", {})
|
||||
if isinstance(new_value, dict) and isinstance(old_value, dict):
|
||||
old_pre = old_value.get("pre_offset_minutes")
|
||||
new_pre = new_value.get("pre_offset_minutes")
|
||||
old_post = old_value.get("post_offset_minutes")
|
||||
new_post = new_value.get("post_offset_minutes")
|
||||
if old_pre != new_pre or old_post != new_post:
|
||||
try:
|
||||
# Prefer async task if Celery is available
|
||||
from apps.channels.tasks import reschedule_upcoming_recordings_for_offset_change
|
||||
|
|
@ -86,24 +99,23 @@ class CoreSettingsViewSet(viewsets.ModelViewSet):
|
|||
# Fallback to synchronous implementation
|
||||
from apps.channels.tasks import reschedule_upcoming_recordings_for_offset_change_impl
|
||||
reschedule_upcoming_recordings_for_offset_change_impl()
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
return response
|
||||
|
||||
def create(self, request, *args, **kwargs):
|
||||
response = super().create(request, *args, **kwargs)
|
||||
# If creating DVR pre/post offset settings, also reschedule upcoming recordings
|
||||
# If creating DVR settings with offset values, reschedule upcoming recordings
|
||||
try:
|
||||
key = request.data.get("key")
|
||||
from core.models import DVR_PRE_OFFSET_MINUTES_KEY, DVR_POST_OFFSET_MINUTES_KEY
|
||||
if key in (DVR_PRE_OFFSET_MINUTES_KEY, DVR_POST_OFFSET_MINUTES_KEY):
|
||||
try:
|
||||
from apps.channels.tasks import reschedule_upcoming_recordings_for_offset_change
|
||||
reschedule_upcoming_recordings_for_offset_change.delay()
|
||||
except Exception:
|
||||
from apps.channels.tasks import reschedule_upcoming_recordings_for_offset_change_impl
|
||||
reschedule_upcoming_recordings_for_offset_change_impl()
|
||||
if key == DVR_SETTINGS_KEY:
|
||||
value = request.data.get("value", {})
|
||||
if isinstance(value, dict) and ("pre_offset_minutes" in value or "post_offset_minutes" in value):
|
||||
try:
|
||||
from apps.channels.tasks import reschedule_upcoming_recordings_for_offset_change
|
||||
reschedule_upcoming_recordings_for_offset_change.delay()
|
||||
except Exception:
|
||||
from apps.channels.tasks import reschedule_upcoming_recordings_for_offset_change_impl
|
||||
reschedule_upcoming_recordings_for_offset_change_impl()
|
||||
except Exception:
|
||||
pass
|
||||
return response
|
||||
|
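With settings consolidated into one JSON value per group, the update hook above has to diff the old and new dicts to decide whether to rehash streams or reschedule recordings. A minimal sketch of that change detection (the function name and sample keys are illustrative, not part of the patch):

def changed_keys(old, new, keys):
    """Return the subset of keys whose values differ between two settings dicts."""
    if not isinstance(old, dict) or not isinstance(new, dict):
        return set(keys)  # treat a type change as "everything changed"
    return {k for k in keys if old.get(k) != new.get(k)}

old_dvr = {"pre_offset_minutes": 0, "post_offset_minutes": 0}
new_dvr = {"pre_offset_minutes": 2, "post_offset_minutes": 0}

if changed_keys(old_dvr, new_dvr, ["pre_offset_minutes", "post_offset_minutes"]):
    print("offsets changed -> reschedule upcoming recordings")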
|
@ -111,13 +123,13 @@ class CoreSettingsViewSet(viewsets.ModelViewSet):
|
|||
def check(self, request, *args, **kwargs):
|
||||
data = request.data
|
||||
|
||||
if data.get("key") == NETWORK_ACCESS:
|
||||
if data.get("key") == NETWORK_ACCESS_KEY:
|
||||
client_ip = ipaddress.ip_address(get_client_ip(request))
|
||||
|
||||
in_network = {}
|
||||
invalid = []
|
||||
|
||||
value = json.loads(data.get("value", "{}"))
|
||||
value = data.get("value", {})
|
||||
for key, val in value.items():
|
||||
in_network[key] = []
|
||||
cidrs = val.split(",")
|
||||
|
|
@ -143,7 +155,11 @@ class CoreSettingsViewSet(viewsets.ModelViewSet):
|
|||
status=status.HTTP_200_OK,
|
||||
)
|
||||
|
||||
return Response(in_network, status=status.HTTP_200_OK)
|
||||
response_data = {
|
||||
**in_network,
|
||||
"client_ip": str(client_ip)
|
||||
}
|
||||
return Response(response_data, status=status.HTTP_200_OK)
|
||||
|
||||
return Response({}, status=status.HTTP_200_OK)
|
||||
|
||||
|
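The check endpoint above tests the client IP against comma-separated CIDR lists per feature key and now echoes the detected client_ip back in the response. The core test is standard-library ipaddress logic; a self-contained sketch with sample values only:

import ipaddress

client_ip = ipaddress.ip_address("192.168.1.25")
network_access = {"m3u": "192.168.1.0/24,10.0.0.0/8", "epg": "172.16.0.0/12"}

in_network, invalid = {}, []
for key, cidr_list in network_access.items():
    in_network[key] = False
    for cidr in cidr_list.split(","):
        try:
            if client_ip in ipaddress.ip_network(cidr.strip(), strict=False):
                in_network[key] = True
                break
        except ValueError:
            invalid.append(cidr)

print({**in_network, "client_ip": str(client_ip)})
# {'m3u': True, 'epg': False, 'client_ip': '192.168.1.25'}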
|
@ -157,8 +173,8 @@ class ProxySettingsViewSet(viewsets.ViewSet):
|
|||
"""Get or create the proxy settings CoreSettings entry"""
|
||||
try:
|
||||
settings_obj = CoreSettings.objects.get(key=PROXY_SETTINGS_KEY)
|
||||
settings_data = json.loads(settings_obj.value)
|
||||
except (CoreSettings.DoesNotExist, json.JSONDecodeError):
|
||||
settings_data = settings_obj.value
|
||||
except CoreSettings.DoesNotExist:
|
||||
# Create default settings
|
||||
settings_data = {
|
||||
"buffering_timeout": 15,
|
||||
|
|
@ -171,7 +187,7 @@ class ProxySettingsViewSet(viewsets.ViewSet):
|
|||
key=PROXY_SETTINGS_KEY,
|
||||
defaults={
|
||||
"name": "Proxy Settings",
|
||||
"value": json.dumps(settings_data)
|
||||
"value": settings_data
|
||||
}
|
||||
)
|
||||
return settings_obj, settings_data
|
||||
|
|
@ -193,8 +209,8 @@ class ProxySettingsViewSet(viewsets.ViewSet):
|
|||
serializer = ProxySettingsSerializer(data=request.data)
|
||||
serializer.is_valid(raise_exception=True)
|
||||
|
||||
# Update the JSON data
|
||||
settings_obj.value = json.dumps(serializer.validated_data)
|
||||
# Update the JSON data - store as dict directly
|
||||
settings_obj.value = serializer.validated_data
|
||||
settings_obj.save()
|
||||
|
||||
return Response(serializer.validated_data)
|
||||
|
|
@ -209,8 +225,8 @@ class ProxySettingsViewSet(viewsets.ViewSet):
|
|||
serializer = ProxySettingsSerializer(data=updated_data)
|
||||
serializer.is_valid(raise_exception=True)
|
||||
|
||||
# Update the JSON data
|
||||
settings_obj.value = json.dumps(serializer.validated_data)
|
||||
# Update the JSON data - store as dict directly
|
||||
settings_obj.value = serializer.validated_data
|
||||
settings_obj.save()
|
||||
|
||||
return Response(serializer.validated_data)
|
||||
|
|
@ -328,8 +344,8 @@ def rehash_streams_endpoint(request):
|
|||
"""Trigger the rehash streams task"""
|
||||
try:
|
||||
# Get the current hash keys from settings
|
||||
hash_key_setting = CoreSettings.objects.get(key=STREAM_HASH_KEY)
|
||||
hash_keys = hash_key_setting.value.split(",")
|
||||
hash_key = CoreSettings.get_m3u_hash_key()
|
||||
hash_keys = hash_key.split(",") if isinstance(hash_key, str) else hash_key
|
||||
|
||||
# Queue the rehash task
|
||||
task = rehash_streams.delay(hash_keys)
|
||||
|
|
@ -340,10 +356,10 @@ def rehash_streams_endpoint(request):
|
|||
"task_id": task.id
|
||||
}, status=status.HTTP_200_OK)
|
||||
|
||||
except CoreSettings.DoesNotExist:
|
||||
except Exception as e:
|
||||
return Response({
|
||||
"success": False,
|
||||
"message": "Hash key settings not found"
|
||||
"message": f"Error triggering rehash: {str(e)}"
|
||||
}, status=status.HTTP_400_BAD_REQUEST)
|
||||
|
||||
except Exception as e:
|
||||
|
|
@ -396,3 +412,64 @@ class TimezoneListView(APIView):
|
|||
'grouped': grouped,
|
||||
'count': len(all_timezones)
|
||||
})
|
||||
|
||||
|
||||
# ─────────────────────────────
|
||||
# System Events API
|
||||
# ─────────────────────────────
|
||||
@api_view(['GET'])
|
||||
@permission_classes([IsAuthenticated])
|
||||
def get_system_events(request):
|
||||
"""
|
||||
Get recent system events (channel start/stop, buffering, client connections, etc.)
|
||||
|
||||
Query Parameters:
|
||||
limit: Number of events to return per page (default: 100, max: 1000)
|
||||
offset: Number of events to skip (for pagination, default: 0)
|
||||
event_type: Filter by specific event type (optional)
|
||||
"""
|
||||
from core.models import SystemEvent
|
||||
|
||||
try:
|
||||
# Get pagination params
|
||||
limit = min(int(request.GET.get('limit', 100)), 1000)
|
||||
offset = int(request.GET.get('offset', 0))
|
||||
|
||||
# Start with all events
|
||||
events = SystemEvent.objects.all()
|
||||
|
||||
# Filter by event_type if provided
|
||||
event_type = request.GET.get('event_type')
|
||||
if event_type:
|
||||
events = events.filter(event_type=event_type)
|
||||
|
||||
# Get total count before applying pagination
|
||||
total_count = events.count()
|
||||
|
||||
# Apply offset and limit for pagination
|
||||
events = events[offset:offset + limit]
|
||||
|
||||
# Serialize the data
|
||||
events_data = [{
|
||||
'id': event.id,
|
||||
'event_type': event.event_type,
|
||||
'event_type_display': event.get_event_type_display(),
|
||||
'timestamp': event.timestamp.isoformat(),
|
||||
'channel_id': str(event.channel_id) if event.channel_id else None,
|
||||
'channel_name': event.channel_name,
|
||||
'details': event.details
|
||||
} for event in events]
|
||||
|
||||
return Response({
|
||||
'events': events_data,
|
||||
'count': len(events_data),
|
||||
'total': total_count,
|
||||
'offset': offset,
|
||||
'limit': limit
|
||||
})
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Error fetching system events: {e}")
|
||||
return Response({
|
||||
'error': 'Failed to fetch system events'
|
||||
}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
|
||||
|
|
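The endpoint above clamps the requested page size at 1000, slices by offset, and reports the pre-pagination total alongside the page. The pagination arithmetic in isolation, as a runnable sketch (list of integers standing in for SystemEvent rows):

def paginate(items, limit=100, offset=0, max_limit=1000):
    """Clamp the requested page size, slice the list, and report totals."""
    limit = min(max(int(limit), 0), max_limit)
    offset = max(int(offset), 0)
    page = items[offset:offset + limit]
    return {"events": page, "count": len(page), "total": len(items),
            "offset": offset, "limit": limit}

events = list(range(250))
print(paginate(events, limit=100, offset=200)["count"])  # 50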
|
|||
|
|
@ -23,7 +23,7 @@
|
|||
"model": "core.streamprofile",
|
||||
"pk": 1,
|
||||
"fields": {
|
||||
"name": "ffmpeg",
|
||||
"name": "FFmpeg",
|
||||
"command": "ffmpeg",
|
||||
"parameters": "-i {streamUrl} -c:v copy -c:a copy -f mpegts pipe:1",
|
||||
"is_active": true,
|
||||
|
|
@ -34,11 +34,22 @@
|
|||
"model": "core.streamprofile",
|
||||
"pk": 2,
|
||||
"fields": {
|
||||
"name": "streamlink",
|
||||
"name": "Streamlink",
|
||||
"command": "streamlink",
|
||||
"parameters": "{streamUrl} best --stdout",
|
||||
"is_active": true,
|
||||
"user_agent": "1"
|
||||
}
|
||||
},
|
||||
{
|
||||
"model": "core.streamprofile",
|
||||
"pk": 3,
|
||||
"fields": {
|
||||
"name": "VLC",
|
||||
"command": "cvlc",
|
||||
"parameters": "-vv -I dummy --no-video-title-show --http-user-agent {userAgent} {streamUrl} --sout #standard{access=file,mux=ts,dst=-}",
|
||||
"is_active": true,
|
||||
"user_agent": "1"
|
||||
}
|
||||
}
|
||||
]
|
||||
|
|
|
|||
|
|
@ -1,13 +1,13 @@
|
|||
# your_app/management/commands/update_column.py
|
||||
|
||||
from django.core.management.base import BaseCommand
|
||||
from core.models import CoreSettings, NETWORK_ACCESS
|
||||
from core.models import CoreSettings, NETWORK_ACCESS_KEY
|
||||
|
||||
|
||||
class Command(BaseCommand):
|
||||
help = "Reset network access settings"
|
||||
|
||||
def handle(self, *args, **options):
|
||||
setting = CoreSettings.objects.get(key=NETWORK_ACCESS)
|
||||
setting.value = "{}"
|
||||
setting = CoreSettings.objects.get(key=NETWORK_ACCESS_KEY)
|
||||
setting.value = {}
|
||||
setting.save()
|
||||
|
|
|
|||
core/migrations/0017_systemevent.py (new file, 28 lines)
|
|
@ -0,0 +1,28 @@
|
|||
# Generated by Django 5.2.4 on 2025-11-20 20:47
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('core', '0016_update_dvr_template_paths'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.CreateModel(
|
||||
name='SystemEvent',
|
||||
fields=[
|
||||
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
|
||||
('event_type', models.CharField(choices=[('channel_start', 'Channel Started'), ('channel_stop', 'Channel Stopped'), ('channel_buffering', 'Channel Buffering'), ('channel_failover', 'Channel Failover'), ('channel_reconnect', 'Channel Reconnected'), ('channel_error', 'Channel Error'), ('client_connect', 'Client Connected'), ('client_disconnect', 'Client Disconnected'), ('recording_start', 'Recording Started'), ('recording_end', 'Recording Ended'), ('stream_switch', 'Stream Switched'), ('m3u_refresh', 'M3U Refreshed'), ('m3u_download', 'M3U Downloaded'), ('epg_refresh', 'EPG Refreshed'), ('epg_download', 'EPG Downloaded')], db_index=True, max_length=50)),
|
||||
('timestamp', models.DateTimeField(auto_now_add=True, db_index=True)),
|
||||
('channel_id', models.UUIDField(blank=True, db_index=True, null=True)),
|
||||
('channel_name', models.CharField(blank=True, max_length=255, null=True)),
|
||||
('details', models.JSONField(blank=True, default=dict)),
|
||||
],
|
||||
options={
|
||||
'ordering': ['-timestamp'],
|
||||
'indexes': [models.Index(fields=['-timestamp'], name='core_system_timesta_c6c3d1_idx'), models.Index(fields=['event_type', '-timestamp'], name='core_system_event_t_4267d9_idx')],
|
||||
},
|
||||
),
|
||||
]
|
||||
core/migrations/0018_alter_systemevent_event_type.py (new file, 18 lines)
|
|
@ -0,0 +1,18 @@
|
|||
# Generated by Django 5.2.4 on 2025-11-21 15:59
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('core', '0017_systemevent'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterField(
|
||||
model_name='systemevent',
|
||||
name='event_type',
|
||||
field=models.CharField(choices=[('channel_start', 'Channel Started'), ('channel_stop', 'Channel Stopped'), ('channel_buffering', 'Channel Buffering'), ('channel_failover', 'Channel Failover'), ('channel_reconnect', 'Channel Reconnected'), ('channel_error', 'Channel Error'), ('client_connect', 'Client Connected'), ('client_disconnect', 'Client Disconnected'), ('recording_start', 'Recording Started'), ('recording_end', 'Recording Ended'), ('stream_switch', 'Stream Switched'), ('m3u_refresh', 'M3U Refreshed'), ('m3u_download', 'M3U Downloaded'), ('epg_refresh', 'EPG Refreshed'), ('epg_download', 'EPG Downloaded'), ('login_success', 'Login Successful'), ('login_failed', 'Login Failed'), ('logout', 'User Logged Out'), ('m3u_blocked', 'M3U Download Blocked'), ('epg_blocked', 'EPG Download Blocked')], db_index=True, max_length=50),
|
||||
),
|
||||
]
|
||||
core/migrations/0019_add_vlc_stream_profile.py (new file, 42 lines)
|
|
@ -0,0 +1,42 @@
|
|||
# Generated migration to add VLC stream profile
|
||||
|
||||
from django.db import migrations
|
||||
|
||||
def add_vlc_profile(apps, schema_editor):
|
||||
StreamProfile = apps.get_model("core", "StreamProfile")
|
||||
UserAgent = apps.get_model("core", "UserAgent")
|
||||
|
||||
# Check if VLC profile already exists
|
||||
if not StreamProfile.objects.filter(name="VLC").exists():
|
||||
# Get the TiviMate user agent (should be pk=1)
|
||||
try:
|
||||
tivimate_ua = UserAgent.objects.get(pk=1)
|
||||
except UserAgent.DoesNotExist:
|
||||
# Fallback: get first available user agent
|
||||
tivimate_ua = UserAgent.objects.first()
|
||||
if not tivimate_ua:
|
||||
# No user agents exist, skip creating profile
|
||||
return
|
||||
|
||||
StreamProfile.objects.create(
|
||||
name="VLC",
|
||||
command="cvlc",
|
||||
parameters="-vv -I dummy --no-video-title-show --http-user-agent {userAgent} {streamUrl} --sout #standard{access=file,mux=ts,dst=-}",
|
||||
is_active=True,
|
||||
user_agent=tivimate_ua,
|
||||
locked=True, # Make it read-only like ffmpeg/streamlink
|
||||
)
|
||||
|
||||
def remove_vlc_profile(apps, schema_editor):
|
||||
StreamProfile = apps.get_model("core", "StreamProfile")
|
||||
StreamProfile.objects.filter(name="VLC").delete()
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('core', '0018_alter_systemevent_event_type'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.RunPython(add_vlc_profile, remove_vlc_profile),
|
||||
]
|
||||
core/migrations/0020_change_coresettings_value_to_jsonfield.py (new file, 267 lines)
|
|
@ -0,0 +1,267 @@
|
|||
# Generated migration to change CoreSettings value field to JSONField and consolidate settings
|
||||
|
||||
import json
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
def convert_string_to_json(apps, schema_editor):
|
||||
"""Convert existing string values to appropriate JSON types before changing column type"""
|
||||
CoreSettings = apps.get_model("core", "CoreSettings")
|
||||
|
||||
for setting in CoreSettings.objects.all():
|
||||
value = setting.value
|
||||
|
||||
if not value:
|
||||
# Empty strings become empty string in JSON
|
||||
setting.value = json.dumps("")
|
||||
setting.save(update_fields=['value'])
|
||||
continue
|
||||
|
||||
# Try to parse as JSON if it looks like JSON (objects/arrays)
|
||||
if value.startswith('{') or value.startswith('['):
|
||||
try:
|
||||
parsed = json.loads(value)
|
||||
# Store as JSON string temporarily (column is still CharField)
|
||||
setting.value = json.dumps(parsed)
|
||||
setting.save(update_fields=['value'])
|
||||
continue
|
||||
except (json.JSONDecodeError, ValueError):
|
||||
pass
|
||||
|
||||
# Try to parse as number
|
||||
try:
|
||||
# Check if it's an integer
|
||||
if '.' not in value and value.lstrip('-').isdigit():
|
||||
setting.value = json.dumps(int(value))
|
||||
setting.save(update_fields=['value'])
|
||||
continue
|
||||
# Check if it's a float
|
||||
float_val = float(value)
|
||||
setting.value = json.dumps(float_val)
|
||||
setting.save(update_fields=['value'])
|
||||
continue
|
||||
except (ValueError, AttributeError):
|
||||
pass
|
||||
|
||||
# Check for booleans
|
||||
if value.lower() in ('true', 'false', '1', '0', 'yes', 'no', 'on', 'off'):
|
||||
bool_val = value.lower() in ('true', '1', 'yes', 'on')
|
||||
setting.value = json.dumps(bool_val)
|
||||
setting.save(update_fields=['value'])
|
||||
continue
|
||||
|
||||
# Default: store as JSON string
|
||||
setting.value = json.dumps(value)
|
||||
setting.save(update_fields=['value'])
|
||||
|
||||
|
||||
def consolidate_settings(apps, schema_editor):
|
||||
"""Consolidate individual setting rows into grouped JSON objects."""
|
||||
CoreSettings = apps.get_model("core", "CoreSettings")
|
||||
|
||||
# Helper to get setting value
|
||||
def get_value(key, default=None):
|
||||
try:
|
||||
obj = CoreSettings.objects.get(key=key)
|
||||
return obj.value if obj.value is not None else default
|
||||
except CoreSettings.DoesNotExist:
|
||||
return default
|
||||
|
||||
# STREAM SETTINGS
|
||||
stream_settings = {
|
||||
"default_user_agent": get_value("default-user-agent"),
|
||||
"default_stream_profile": get_value("default-stream-profile"),
|
||||
"m3u_hash_key": get_value("m3u-hash-key", ""),
|
||||
"preferred_region": get_value("preferred-region"),
|
||||
"auto_import_mapped_files": get_value("auto-import-mapped-files"),
|
||||
}
|
||||
CoreSettings.objects.update_or_create(
|
||||
key="stream_settings",
|
||||
defaults={"name": "Stream Settings", "value": stream_settings}
|
||||
)
|
||||
|
||||
# DVR SETTINGS
|
||||
dvr_settings = {
|
||||
"tv_template": get_value("dvr-tv-template", "TV_Shows/{show}/S{season:02d}E{episode:02d}.mkv"),
|
||||
"movie_template": get_value("dvr-movie-template", "Movies/{title} ({year}).mkv"),
|
||||
"tv_fallback_dir": get_value("dvr-tv-fallback-dir", "TV_Shows"),
|
||||
"tv_fallback_template": get_value("dvr-tv-fallback-template", "TV_Shows/{show}/{start}.mkv"),
|
||||
"movie_fallback_template": get_value("dvr-movie-fallback-template", "Movies/{start}.mkv"),
|
||||
"comskip_enabled": bool(get_value("dvr-comskip-enabled", False)),
|
||||
"comskip_custom_path": get_value("dvr-comskip-custom-path", ""),
|
||||
"pre_offset_minutes": int(get_value("dvr-pre-offset-minutes", 0) or 0),
|
||||
"post_offset_minutes": int(get_value("dvr-post-offset-minutes", 0) or 0),
|
||||
"series_rules": get_value("dvr-series-rules", []),
|
||||
}
|
||||
CoreSettings.objects.update_or_create(
|
||||
key="dvr_settings",
|
||||
defaults={"name": "DVR Settings", "value": dvr_settings}
|
||||
)
|
||||
|
||||
# BACKUP SETTINGS - using underscore keys (not dashes)
|
||||
backup_settings = {
|
||||
"schedule_enabled": get_value("backup_schedule_enabled") if get_value("backup_schedule_enabled") is not None else True,
|
||||
"schedule_frequency": get_value("backup_schedule_frequency") or "daily",
|
||||
"schedule_time": get_value("backup_schedule_time") or "03:00",
|
||||
"schedule_day_of_week": get_value("backup_schedule_day_of_week") if get_value("backup_schedule_day_of_week") is not None else 0,
|
||||
"retention_count": get_value("backup_retention_count") if get_value("backup_retention_count") is not None else 3,
|
||||
"schedule_cron_expression": get_value("backup_schedule_cron_expression") or "",
|
||||
}
|
||||
CoreSettings.objects.update_or_create(
|
||||
key="backup_settings",
|
||||
defaults={"name": "Backup Settings", "value": backup_settings}
|
||||
)
|
||||
|
||||
# SYSTEM SETTINGS
|
||||
system_settings = {
|
||||
"time_zone": get_value("system-time-zone", "UTC"),
|
||||
"max_system_events": int(get_value("max-system-events", 100) or 100),
|
||||
}
|
||||
CoreSettings.objects.update_or_create(
|
||||
key="system_settings",
|
||||
defaults={"name": "System Settings", "value": system_settings}
|
||||
)
|
||||
|
||||
# Rename proxy-settings to proxy_settings (if it exists with old name)
|
||||
try:
|
||||
old_proxy = CoreSettings.objects.get(key="proxy-settings")
|
||||
old_proxy.key = "proxy_settings"
|
||||
old_proxy.save()
|
||||
except CoreSettings.DoesNotExist:
|
||||
pass
|
||||
|
||||
# Ensure proxy_settings exists with defaults if not present
|
||||
proxy_obj, proxy_created = CoreSettings.objects.get_or_create(
|
||||
key="proxy_settings",
|
||||
defaults={
|
||||
"name": "Proxy Settings",
|
||||
"value": {
|
||||
"buffering_timeout": 15,
|
||||
"buffering_speed": 1.0,
|
||||
"redis_chunk_ttl": 60,
|
||||
"channel_shutdown_delay": 0,
|
||||
"channel_init_grace_period": 5,
|
||||
}
|
||||
}
|
||||
)
|
||||
|
||||
# Rename network-access to network_access (if it exists with old name)
|
||||
try:
|
||||
old_network = CoreSettings.objects.get(key="network-access")
|
||||
old_network.key = "network_access"
|
||||
old_network.save()
|
||||
except CoreSettings.DoesNotExist:
|
||||
pass
|
||||
|
||||
# Ensure network_access exists with defaults if not present
|
||||
network_obj, network_created = CoreSettings.objects.get_or_create(
|
||||
key="network_access",
|
||||
defaults={
|
||||
"name": "Network Access",
|
||||
"value": {}
|
||||
}
|
||||
)
|
||||
# Delete old individual setting rows (keep only the new grouped settings)
|
||||
grouped_keys = ["stream_settings", "dvr_settings", "backup_settings", "system_settings", "proxy_settings", "network_access"]
|
||||
CoreSettings.objects.exclude(key__in=grouped_keys).delete()
|
||||
|
||||
|
||||
def reverse_migration(apps, schema_editor):
|
||||
"""Reverse migration: split grouped settings and convert JSON back to strings"""
|
||||
CoreSettings = apps.get_model("core", "CoreSettings")
|
||||
|
||||
# Helper to create individual setting
|
||||
def create_setting(key, name, value):
|
||||
# Convert value back to string representation for CharField
|
||||
if isinstance(value, str):
|
||||
str_value = value
|
||||
elif isinstance(value, bool):
|
||||
str_value = "true" if value else "false"
|
||||
elif isinstance(value, (int, float)):
|
||||
str_value = str(value)
|
||||
elif isinstance(value, (dict, list)):
|
||||
str_value = json.dumps(value)
|
||||
elif value is None:
|
||||
str_value = ""
|
||||
else:
|
||||
str_value = str(value)
|
||||
|
||||
CoreSettings.objects.update_or_create(
|
||||
key=key,
|
||||
defaults={"name": name, "value": str_value}
|
||||
)
|
||||
|
||||
# Split stream_settings
|
||||
try:
|
||||
stream = CoreSettings.objects.get(key="stream_settings")
|
||||
if isinstance(stream.value, dict):
|
||||
create_setting("default_user_agent", "Default User Agent", stream.value.get("default_user_agent"))
|
||||
create_setting("default_stream_profile", "Default Stream Profile", stream.value.get("default_stream_profile"))
|
||||
create_setting("stream_hash_key", "Stream Hash Key", stream.value.get("m3u_hash_key", ""))
|
||||
create_setting("preferred_region", "Preferred Region", stream.value.get("preferred_region"))
|
||||
create_setting("auto_import_mapped_files", "Auto Import Mapped Files", stream.value.get("auto_import_mapped_files"))
|
||||
stream.delete()
|
||||
except CoreSettings.DoesNotExist:
|
||||
pass
|
||||
|
||||
# Split dvr_settings
|
||||
try:
|
||||
dvr = CoreSettings.objects.get(key="dvr_settings")
|
||||
if isinstance(dvr.value, dict):
|
||||
create_setting("dvr_tv_template", "DVR TV Template", dvr.value.get("tv_template", "TV_Shows/{show}/S{season:02d}E{episode:02d}.mkv"))
|
||||
create_setting("dvr_movie_template", "DVR Movie Template", dvr.value.get("movie_template", "Movies/{title} ({year}).mkv"))
|
||||
create_setting("dvr_tv_fallback_dir", "DVR TV Fallback Dir", dvr.value.get("tv_fallback_dir", "TV_Shows"))
|
||||
create_setting("dvr_tv_fallback_template", "DVR TV Fallback Template", dvr.value.get("tv_fallback_template", "TV_Shows/{show}/{start}.mkv"))
|
||||
create_setting("dvr_movie_fallback_template", "DVR Movie Fallback Template", dvr.value.get("movie_fallback_template", "Movies/{start}.mkv"))
|
||||
create_setting("dvr_comskip_enabled", "DVR Comskip Enabled", dvr.value.get("comskip_enabled", False))
|
||||
create_setting("dvr_comskip_custom_path", "DVR Comskip Custom Path", dvr.value.get("comskip_custom_path", ""))
|
||||
create_setting("dvr_pre_offset_minutes", "DVR Pre Offset Minutes", dvr.value.get("pre_offset_minutes", 0))
|
||||
create_setting("dvr_post_offset_minutes", "DVR Post Offset Minutes", dvr.value.get("post_offset_minutes", 0))
|
||||
create_setting("dvr_series_rules", "DVR Series Rules", dvr.value.get("series_rules", []))
|
||||
dvr.delete()
|
||||
except CoreSettings.DoesNotExist:
|
||||
pass
|
||||
|
||||
# Split backup_settings
|
||||
try:
|
||||
backup = CoreSettings.objects.get(key="backup_settings")
|
||||
if isinstance(backup.value, dict):
|
||||
create_setting("backup_schedule_enabled", "Backup Schedule Enabled", backup.value.get("schedule_enabled", False))
|
||||
create_setting("backup_schedule_frequency", "Backup Schedule Frequency", backup.value.get("schedule_frequency", "weekly"))
|
||||
create_setting("backup_schedule_time", "Backup Schedule Time", backup.value.get("schedule_time", "02:00"))
|
||||
create_setting("backup_schedule_day_of_week", "Backup Schedule Day of Week", backup.value.get("schedule_day_of_week", 0))
|
||||
create_setting("backup_retention_count", "Backup Retention Count", backup.value.get("retention_count", 7))
|
||||
create_setting("backup_schedule_cron_expression", "Backup Schedule Cron Expression", backup.value.get("schedule_cron_expression", ""))
|
||||
backup.delete()
|
||||
except CoreSettings.DoesNotExist:
|
||||
pass
|
||||
|
||||
# Split system_settings
|
||||
try:
|
||||
system = CoreSettings.objects.get(key="system_settings")
|
||||
if isinstance(system.value, dict):
|
||||
create_setting("system_time_zone", "System Time Zone", system.value.get("time_zone", "UTC"))
|
||||
create_setting("max_system_events", "Max System Events", system.value.get("max_system_events", 100))
|
||||
system.delete()
|
||||
except CoreSettings.DoesNotExist:
|
||||
pass
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('core', '0019_add_vlc_stream_profile'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
# First, convert all data to valid JSON strings while column is still CharField
|
||||
migrations.RunPython(convert_string_to_json, migrations.RunPython.noop),
|
||||
# Then change the field type to JSONField
|
||||
migrations.AlterField(
|
||||
model_name='coresettings',
|
||||
name='value',
|
||||
field=models.JSONField(blank=True, default=dict),
|
||||
),
|
||||
# Finally, consolidate individual settings into grouped JSON objects
|
||||
migrations.RunPython(consolidate_settings, reverse_migration),
|
||||
]
|
||||
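The data migration above has to guess the intended JSON type of each legacy CharField value before the column becomes a JSONField. Its heuristics, isolated into a runnable sketch (order matters: JSON containers, then numbers, then booleans, then plain strings):

import json

def coerce_legacy_value(value: str):
    """Best-effort conversion of a stored string to its JSON-native type."""
    if not value:
        return ""
    if value.startswith(("{", "[")):
        try:
            return json.loads(value)
        except (json.JSONDecodeError, ValueError):
            pass
    try:
        if "." not in value and value.lstrip("-").isdigit():
            return int(value)
        return float(value)
    except ValueError:
        pass
    if value.lower() in ("true", "false", "1", "0", "yes", "no", "on", "off"):
        return value.lower() in ("true", "1", "yes", "on")
    return value

print([coerce_legacy_value(v) for v in ('{"a": 1}', "15", "1.5", "true", "UTC")])
# [{'a': 1}, 15, 1.5, True, 'UTC']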
core/models.py (319 lines changed)
|
|
@ -1,4 +1,7 @@
|
|||
# core/models.py
|
||||
|
||||
from shlex import split as shlex_split
|
||||
|
||||
from django.conf import settings
|
||||
from django.db import models
|
||||
from django.utils.text import slugify
|
||||
|
|
@ -133,7 +136,7 @@ class StreamProfile(models.Model):
|
|||
# Split the command and iterate through each part to apply replacements
|
||||
cmd = [self.command] + [
|
||||
self._replace_in_part(part, replacements)
|
||||
for part in self.parameters.split()
|
||||
for part in shlex_split(self.parameters) # use shlex to handle quoted strings
|
||||
]
|
||||
|
||||
return cmd
|
||||
|
|
@ -145,24 +148,13 @@ class StreamProfile(models.Model):
|
|||
return part
|
||||
|
||||
|
||||
DEFAULT_USER_AGENT_KEY = slugify("Default User-Agent")
|
||||
DEFAULT_STREAM_PROFILE_KEY = slugify("Default Stream Profile")
|
||||
STREAM_HASH_KEY = slugify("M3U Hash Key")
|
||||
PREFERRED_REGION_KEY = slugify("Preferred Region")
|
||||
AUTO_IMPORT_MAPPED_FILES = slugify("Auto-Import Mapped Files")
|
||||
NETWORK_ACCESS = slugify("Network Access")
|
||||
PROXY_SETTINGS_KEY = slugify("Proxy Settings")
|
||||
DVR_TV_TEMPLATE_KEY = slugify("DVR TV Template")
|
||||
DVR_MOVIE_TEMPLATE_KEY = slugify("DVR Movie Template")
|
||||
DVR_SERIES_RULES_KEY = slugify("DVR Series Rules")
|
||||
DVR_TV_FALLBACK_DIR_KEY = slugify("DVR TV Fallback Dir")
|
||||
DVR_TV_FALLBACK_TEMPLATE_KEY = slugify("DVR TV Fallback Template")
|
||||
DVR_MOVIE_FALLBACK_TEMPLATE_KEY = slugify("DVR Movie Fallback Template")
|
||||
DVR_COMSKIP_ENABLED_KEY = slugify("DVR Comskip Enabled")
|
||||
DVR_COMSKIP_CUSTOM_PATH_KEY = slugify("DVR Comskip Custom Path")
|
||||
DVR_PRE_OFFSET_MINUTES_KEY = slugify("DVR Pre-Offset Minutes")
|
||||
DVR_POST_OFFSET_MINUTES_KEY = slugify("DVR Post-Offset Minutes")
|
||||
SYSTEM_TIME_ZONE_KEY = slugify("System Time Zone")
|
||||
# Setting group keys
|
||||
STREAM_SETTINGS_KEY = "stream_settings"
|
||||
DVR_SETTINGS_KEY = "dvr_settings"
|
||||
BACKUP_SETTINGS_KEY = "backup_settings"
|
||||
PROXY_SETTINGS_KEY = "proxy_settings"
|
||||
NETWORK_ACCESS_KEY = "network_access"
|
||||
SYSTEM_SETTINGS_KEY = "system_settings"
|
||||
|
||||
|
||||
class CoreSettings(models.Model):
|
||||
|
|
@ -173,205 +165,208 @@ class CoreSettings(models.Model):
|
|||
name = models.CharField(
|
||||
max_length=255,
|
||||
)
|
||||
value = models.CharField(
|
||||
max_length=255,
|
||||
value = models.JSONField(
|
||||
default=dict,
|
||||
blank=True,
|
||||
)
|
||||
|
||||
def __str__(self):
|
||||
return "Core Settings"
|
||||
|
||||
# Helper methods to get/set grouped settings
|
||||
@classmethod
|
||||
def _get_group(cls, key, defaults=None):
|
||||
"""Get a settings group, returning defaults if not found."""
|
||||
try:
|
||||
return cls.objects.get(key=key).value or (defaults or {})
|
||||
except cls.DoesNotExist:
|
||||
return defaults or {}
|
||||
|
||||
@classmethod
|
||||
def _update_group(cls, key, name, updates):
|
||||
"""Update specific fields in a settings group."""
|
||||
obj, created = cls.objects.get_or_create(
|
||||
key=key,
|
||||
defaults={"name": name, "value": {}}
|
||||
)
|
||||
current = obj.value if isinstance(obj.value, dict) else {}
|
||||
current.update(updates)
|
||||
obj.value = current
|
||||
obj.save()
|
||||
return current
|
||||
|
||||
# Stream Settings
|
||||
@classmethod
|
||||
def get_stream_settings(cls):
|
||||
"""Get all stream-related settings."""
|
||||
return cls._get_group(STREAM_SETTINGS_KEY, {
|
||||
"default_user_agent": None,
|
||||
"default_stream_profile": None,
|
||||
"m3u_hash_key": "",
|
||||
"preferred_region": None,
|
||||
"auto_import_mapped_files": None,
|
||||
})
|
||||
|
||||
@classmethod
|
||||
def get_default_user_agent_id(cls):
|
||||
"""Retrieve a system profile by name (or return None if not found)."""
|
||||
return cls.objects.get(key=DEFAULT_USER_AGENT_KEY).value
|
||||
return cls.get_stream_settings().get("default_user_agent")
|
||||
|
||||
@classmethod
|
||||
def get_default_stream_profile_id(cls):
|
||||
return cls.objects.get(key=DEFAULT_STREAM_PROFILE_KEY).value
|
||||
return cls.get_stream_settings().get("default_stream_profile")
|
||||
|
||||
@classmethod
|
||||
def get_m3u_hash_key(cls):
|
||||
return cls.objects.get(key=STREAM_HASH_KEY).value
|
||||
return cls.get_stream_settings().get("m3u_hash_key", "")
|
||||
|
||||
@classmethod
|
||||
def get_preferred_region(cls):
|
||||
"""Retrieve the preferred region setting (or return None if not found)."""
|
||||
try:
|
||||
return cls.objects.get(key=PREFERRED_REGION_KEY).value
|
||||
except cls.DoesNotExist:
|
||||
return None
|
||||
return cls.get_stream_settings().get("preferred_region")
|
||||
|
||||
@classmethod
|
||||
def get_auto_import_mapped_files(cls):
|
||||
"""Retrieve the preferred region setting (or return None if not found)."""
|
||||
try:
|
||||
return cls.objects.get(key=AUTO_IMPORT_MAPPED_FILES).value
|
||||
except cls.DoesNotExist:
|
||||
return None
|
||||
return cls.get_stream_settings().get("auto_import_mapped_files")
|
||||
|
||||
# DVR Settings
|
||||
@classmethod
|
||||
def get_proxy_settings(cls):
|
||||
"""Retrieve proxy settings as dict (or return defaults if not found)."""
|
||||
try:
|
||||
import json
|
||||
settings_json = cls.objects.get(key=PROXY_SETTINGS_KEY).value
|
||||
return json.loads(settings_json)
|
||||
except (cls.DoesNotExist, json.JSONDecodeError):
|
||||
# Return defaults if not found or invalid JSON
|
||||
return {
|
||||
"buffering_timeout": 15,
|
||||
"buffering_speed": 1.0,
|
||||
"redis_chunk_ttl": 60,
|
||||
"channel_shutdown_delay": 0,
|
||||
"channel_init_grace_period": 5,
|
||||
}
|
||||
def get_dvr_settings(cls):
|
||||
"""Get all DVR-related settings."""
|
||||
return cls._get_group(DVR_SETTINGS_KEY, {
|
||||
"tv_template": "TV_Shows/{show}/S{season:02d}E{episode:02d}.mkv",
|
||||
"movie_template": "Movies/{title} ({year}).mkv",
|
||||
"tv_fallback_dir": "TV_Shows",
|
||||
"tv_fallback_template": "TV_Shows/{show}/{start}.mkv",
|
||||
"movie_fallback_template": "Movies/{start}.mkv",
|
||||
"comskip_enabled": False,
|
||||
"comskip_custom_path": "",
|
||||
"pre_offset_minutes": 0,
|
||||
"post_offset_minutes": 0,
|
||||
"series_rules": [],
|
||||
})
|
||||
|
||||
@classmethod
|
||||
def get_dvr_tv_template(cls):
|
||||
try:
|
||||
return cls.objects.get(key=DVR_TV_TEMPLATE_KEY).value
|
||||
except cls.DoesNotExist:
|
||||
# Default: relative to recordings root (/data/recordings)
|
||||
return "TV_Shows/{show}/S{season:02d}E{episode:02d}.mkv"
|
||||
return cls.get_dvr_settings().get("tv_template", "TV_Shows/{show}/S{season:02d}E{episode:02d}.mkv")
|
||||
|
||||
@classmethod
|
||||
def get_dvr_movie_template(cls):
|
||||
try:
|
||||
return cls.objects.get(key=DVR_MOVIE_TEMPLATE_KEY).value
|
||||
except cls.DoesNotExist:
|
||||
return "Movies/{title} ({year}).mkv"
|
||||
return cls.get_dvr_settings().get("movie_template", "Movies/{title} ({year}).mkv")
|
||||
|
||||
@classmethod
|
||||
def get_dvr_tv_fallback_dir(cls):
|
||||
"""Folder name to use when a TV episode has no season/episode information.
|
||||
Defaults to 'TV_Show' to match existing behavior but can be overridden in settings.
|
||||
"""
|
||||
try:
|
||||
return cls.objects.get(key=DVR_TV_FALLBACK_DIR_KEY).value or "TV_Shows"
|
||||
except cls.DoesNotExist:
|
||||
return "TV_Shows"
|
||||
return cls.get_dvr_settings().get("tv_fallback_dir", "TV_Shows")
|
||||
|
||||
@classmethod
|
||||
def get_dvr_tv_fallback_template(cls):
|
||||
"""Full path template used when season/episode are missing for a TV airing."""
|
||||
try:
|
||||
return cls.objects.get(key=DVR_TV_FALLBACK_TEMPLATE_KEY).value
|
||||
except cls.DoesNotExist:
|
||||
# default requested by user
|
||||
return "TV_Shows/{show}/{start}.mkv"
|
||||
return cls.get_dvr_settings().get("tv_fallback_template", "TV_Shows/{show}/{start}.mkv")
|
||||
|
||||
@classmethod
|
||||
def get_dvr_movie_fallback_template(cls):
|
||||
"""Full path template used when movie metadata is incomplete."""
|
||||
try:
|
||||
return cls.objects.get(key=DVR_MOVIE_FALLBACK_TEMPLATE_KEY).value
|
||||
except cls.DoesNotExist:
|
||||
return "Movies/{start}.mkv"
|
||||
return cls.get_dvr_settings().get("movie_fallback_template", "Movies/{start}.mkv")
|
||||
|
||||
@classmethod
|
||||
def get_dvr_comskip_enabled(cls):
|
||||
"""Return boolean-like string value ('true'/'false') for comskip enablement."""
|
||||
try:
|
||||
val = cls.objects.get(key=DVR_COMSKIP_ENABLED_KEY).value
|
||||
return str(val).lower() in ("1", "true", "yes", "on")
|
||||
except cls.DoesNotExist:
|
||||
return False
|
||||
return bool(cls.get_dvr_settings().get("comskip_enabled", False))
|
||||
|
||||
@classmethod
|
||||
def get_dvr_comskip_custom_path(cls):
|
||||
"""Return configured comskip.ini path or empty string if unset."""
|
||||
try:
|
||||
return cls.objects.get(key=DVR_COMSKIP_CUSTOM_PATH_KEY).value
|
||||
except cls.DoesNotExist:
|
||||
return ""
|
||||
return cls.get_dvr_settings().get("comskip_custom_path", "")
|
||||
|
||||
@classmethod
|
||||
def set_dvr_comskip_custom_path(cls, path: str | None):
|
||||
"""Persist the comskip.ini path setting, normalizing nulls to empty string."""
|
||||
value = (path or "").strip()
|
||||
obj, _ = cls.objects.get_or_create(
|
||||
key=DVR_COMSKIP_CUSTOM_PATH_KEY,
|
||||
defaults={"name": "DVR Comskip Custom Path", "value": value},
|
||||
)
|
||||
if obj.value != value:
|
||||
obj.value = value
|
||||
obj.save(update_fields=["value"])
|
||||
cls._update_group(DVR_SETTINGS_KEY, "DVR Settings", {"comskip_custom_path": value})
|
||||
return value
|
||||
|
||||
@classmethod
|
||||
def get_dvr_pre_offset_minutes(cls):
|
||||
"""Minutes to start recording before scheduled start (default 0)."""
|
||||
try:
|
||||
val = cls.objects.get(key=DVR_PRE_OFFSET_MINUTES_KEY).value
|
||||
return int(val)
|
||||
except cls.DoesNotExist:
|
||||
return 0
|
||||
except Exception:
|
||||
try:
|
||||
return int(float(val))
|
||||
except Exception:
|
||||
return 0
|
||||
return int(cls.get_dvr_settings().get("pre_offset_minutes", 0) or 0)
|
||||
|
||||
@classmethod
|
||||
def get_dvr_post_offset_minutes(cls):
|
||||
"""Minutes to stop recording after scheduled end (default 0)."""
|
||||
try:
|
||||
val = cls.objects.get(key=DVR_POST_OFFSET_MINUTES_KEY).value
|
||||
return int(val)
|
||||
except cls.DoesNotExist:
|
||||
return 0
|
||||
except Exception:
|
||||
try:
|
||||
return int(float(val))
|
||||
except Exception:
|
||||
return 0
|
||||
|
||||
@classmethod
|
||||
def get_system_time_zone(cls):
|
||||
"""Return configured system time zone or fall back to Django settings."""
|
||||
try:
|
||||
value = cls.objects.get(key=SYSTEM_TIME_ZONE_KEY).value
|
||||
if value:
|
||||
return value
|
||||
except cls.DoesNotExist:
|
||||
pass
|
||||
return getattr(settings, "TIME_ZONE", "UTC") or "UTC"
|
||||
|
||||
@classmethod
|
||||
def set_system_time_zone(cls, tz_name: str | None):
|
||||
"""Persist the desired system time zone identifier."""
|
||||
value = (tz_name or "").strip() or getattr(settings, "TIME_ZONE", "UTC") or "UTC"
|
||||
obj, _ = cls.objects.get_or_create(
|
||||
key=SYSTEM_TIME_ZONE_KEY,
|
||||
defaults={"name": "System Time Zone", "value": value},
|
||||
)
|
||||
if obj.value != value:
|
||||
obj.value = value
|
||||
obj.save(update_fields=["value"])
|
||||
return value
|
||||
return int(cls.get_dvr_settings().get("post_offset_minutes", 0) or 0)
|
||||
|
||||
@classmethod
|
||||
def get_dvr_series_rules(cls):
|
||||
"""Return list of series recording rules. Each: {tvg_id, title, mode: 'all'|'new'}"""
|
||||
import json
|
||||
try:
|
||||
raw = cls.objects.get(key=DVR_SERIES_RULES_KEY).value
|
||||
rules = json.loads(raw) if raw else []
|
||||
if isinstance(rules, list):
|
||||
return rules
|
||||
return []
|
||||
except cls.DoesNotExist:
|
||||
# Initialize empty if missing
|
||||
cls.objects.create(key=DVR_SERIES_RULES_KEY, name="DVR Series Rules", value="[]")
|
||||
return []
|
||||
return cls.get_dvr_settings().get("series_rules", [])
|
||||
|
||||
@classmethod
|
||||
def set_dvr_series_rules(cls, rules):
|
||||
import json
|
||||
try:
|
||||
obj, _ = cls.objects.get_or_create(key=DVR_SERIES_RULES_KEY, defaults={"name": "DVR Series Rules", "value": "[]"})
|
||||
obj.value = json.dumps(rules)
|
||||
obj.save(update_fields=["value"])
|
||||
return rules
|
||||
except Exception:
|
||||
return rules
|
||||
cls._update_group(DVR_SETTINGS_KEY, "DVR Settings", {"series_rules": rules})
|
||||
return rules
|
||||
|
||||
# Proxy Settings
|
||||
@classmethod
|
||||
def get_proxy_settings(cls):
|
||||
"""Get proxy settings."""
|
||||
return cls._get_group(PROXY_SETTINGS_KEY, {
|
||||
"buffering_timeout": 15,
|
||||
"buffering_speed": 1.0,
|
||||
"redis_chunk_ttl": 60,
|
||||
"channel_shutdown_delay": 0,
|
||||
"channel_init_grace_period": 5,
|
||||
})
|
||||
|
||||
# System Settings
|
||||
@classmethod
|
||||
def get_system_settings(cls):
|
||||
"""Get all system-related settings."""
|
||||
return cls._get_group(SYSTEM_SETTINGS_KEY, {
|
||||
"time_zone": getattr(settings, "TIME_ZONE", "UTC") or "UTC",
|
||||
"max_system_events": 100,
|
||||
})
|
||||
|
||||
@classmethod
|
||||
def get_system_time_zone(cls):
|
||||
return cls.get_system_settings().get("time_zone") or getattr(settings, "TIME_ZONE", "UTC") or "UTC"
|
||||
|
||||
@classmethod
|
||||
def set_system_time_zone(cls, tz_name: str | None):
|
||||
value = (tz_name or "").strip() or getattr(settings, "TIME_ZONE", "UTC") or "UTC"
|
||||
cls._update_group(SYSTEM_SETTINGS_KEY, "System Settings", {"time_zone": value})
|
||||
return value
|
||||
|
||||
|
||||
class SystemEvent(models.Model):
|
||||
"""
|
||||
Tracks system events like channel start/stop, buffering, failover, client connections.
|
||||
Maintains a rolling history based on max_system_events setting.
|
||||
"""
|
||||
EVENT_TYPES = [
|
||||
('channel_start', 'Channel Started'),
|
||||
('channel_stop', 'Channel Stopped'),
|
||||
('channel_buffering', 'Channel Buffering'),
|
||||
('channel_failover', 'Channel Failover'),
|
||||
('channel_reconnect', 'Channel Reconnected'),
|
||||
('channel_error', 'Channel Error'),
|
||||
('client_connect', 'Client Connected'),
|
||||
('client_disconnect', 'Client Disconnected'),
|
||||
('recording_start', 'Recording Started'),
|
||||
('recording_end', 'Recording Ended'),
|
||||
('stream_switch', 'Stream Switched'),
|
||||
('m3u_refresh', 'M3U Refreshed'),
|
||||
('m3u_download', 'M3U Downloaded'),
|
||||
('epg_refresh', 'EPG Refreshed'),
|
||||
('epg_download', 'EPG Downloaded'),
|
||||
('login_success', 'Login Successful'),
|
||||
('login_failed', 'Login Failed'),
|
||||
('logout', 'User Logged Out'),
|
||||
('m3u_blocked', 'M3U Download Blocked'),
|
||||
('epg_blocked', 'EPG Download Blocked'),
|
||||
]
|
||||
|
||||
event_type = models.CharField(max_length=50, choices=EVENT_TYPES, db_index=True)
|
||||
timestamp = models.DateTimeField(auto_now_add=True, db_index=True)
|
||||
channel_id = models.UUIDField(null=True, blank=True, db_index=True)
|
||||
channel_name = models.CharField(max_length=255, null=True, blank=True)
|
||||
details = models.JSONField(default=dict, blank=True)
|
||||
|
||||
class Meta:
|
||||
ordering = ['-timestamp']
|
||||
indexes = [
|
||||
models.Index(fields=['-timestamp']),
|
||||
models.Index(fields=['event_type', '-timestamp']),
|
||||
]
|
||||
|
||||
def __str__(self):
|
||||
return f"{self.event_type} - {self.channel_name or 'N/A'} @ {self.timestamp}"
|
||||
|
|
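The _get_group/_update_group helpers above implement a read-with-defaults and merge-then-save pattern over one JSON row per settings group. A dict-backed sketch of the same semantics outside Django (the in-memory store is a stand-in for the CoreSettings table, not the real model API):

store = {}  # key -> {"name": ..., "value": {...}}

def get_group(key, defaults=None):
    row = store.get(key)
    return (row["value"] or (defaults or {})) if row else (defaults or {})

def update_group(key, name, updates):
    row = store.setdefault(key, {"name": name, "value": {}})
    current = row["value"] if isinstance(row["value"], dict) else {}
    current.update(updates)          # merge, never replace the whole group
    row["value"] = current
    return current

update_group("dvr_settings", "DVR Settings", {"pre_offset_minutes": 2})
update_group("dvr_settings", "DVR Settings", {"comskip_enabled": True})
print(get_group("dvr_settings", {"pre_offset_minutes": 0}))
# {'pre_offset_minutes': 2, 'comskip_enabled': True}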
|
|||
|
|
@ -3,7 +3,7 @@ import json
|
|||
import ipaddress
|
||||
|
||||
from rest_framework import serializers
|
||||
from .models import CoreSettings, UserAgent, StreamProfile, NETWORK_ACCESS
|
||||
from .models import CoreSettings, UserAgent, StreamProfile, NETWORK_ACCESS_KEY
|
||||
|
||||
|
||||
class UserAgentSerializer(serializers.ModelSerializer):
|
||||
|
|
@ -40,10 +40,10 @@ class CoreSettingsSerializer(serializers.ModelSerializer):
|
|||
fields = "__all__"
|
||||
|
||||
def update(self, instance, validated_data):
|
||||
if instance.key == NETWORK_ACCESS:
|
||||
if instance.key == NETWORK_ACCESS_KEY:
|
||||
errors = False
|
||||
invalid = {}
|
||||
value = json.loads(validated_data.get("value"))
|
||||
value = validated_data.get("value")
|
||||
for key, val in value.items():
|
||||
cidrs = val.split(",")
|
||||
for cidr in cidrs:
|
||||
|
|
|
|||
|
|
@ -513,7 +513,8 @@ def rehash_streams(keys):
|
|||
|
||||
for obj in batch:
|
||||
# Generate new hash
|
||||
new_hash = Stream.generate_hash_key(obj.name, obj.url, obj.tvg_id, keys, m3u_id=obj.m3u_account_id)
|
||||
group_name = obj.channel_group.name if obj.channel_group else None
|
||||
new_hash = Stream.generate_hash_key(obj.name, obj.url, obj.tvg_id, keys, m3u_id=obj.m3u_account_id, group=group_name)
|
||||
|
||||
# Check if this hash already exists in our tracking dict or in database
|
||||
if new_hash in hash_keys:
|
||||
|
|
|
|||
|
|
@ -388,3 +388,52 @@ def validate_flexible_url(value):
|
|||
|
||||
# If it doesn't match our flexible patterns, raise the original error
|
||||
raise ValidationError("Enter a valid URL.")
|
||||
|
||||
|
||||
def log_system_event(event_type, channel_id=None, channel_name=None, **details):
|
||||
"""
|
||||
Log a system event and maintain the configured max history.
|
||||
|
||||
Args:
|
||||
event_type: Type of event (e.g., 'channel_start', 'client_connect')
|
||||
channel_id: Optional UUID of the channel
|
||||
channel_name: Optional name of the channel
|
||||
**details: Additional details to store in the event (stored as JSON)
|
||||
|
||||
Example:
|
||||
log_system_event('channel_start', channel_id=uuid, channel_name='CNN',
|
||||
stream_url='http://...', user='admin')
|
||||
"""
|
||||
from core.models import SystemEvent, CoreSettings
|
||||
|
||||
try:
|
||||
# Create the event
|
||||
SystemEvent.objects.create(
|
||||
event_type=event_type,
|
||||
channel_id=channel_id,
|
||||
channel_name=channel_name,
|
||||
details=details
|
||||
)
|
||||
|
||||
# Get max events from settings (default 100)
|
||||
try:
|
||||
from .models import CoreSettings
|
||||
system_settings = CoreSettings.objects.filter(key='system_settings').first()
|
||||
if system_settings and isinstance(system_settings.value, dict):
|
||||
max_events = int(system_settings.value.get('max_system_events', 100))
|
||||
else:
|
||||
max_events = 100
|
||||
except Exception:
|
||||
max_events = 100
|
||||
|
||||
# Delete old events beyond the limit (keep it efficient with a single query)
|
||||
total_count = SystemEvent.objects.count()
|
||||
if total_count > max_events:
|
||||
# Get the ID of the event at the cutoff point
|
||||
cutoff_event = SystemEvent.objects.values_list('id', flat=True)[max_events]
|
||||
# Delete all events with ID less than cutoff (older events)
|
||||
SystemEvent.objects.filter(id__lt=cutoff_event).delete()
|
||||
|
||||
except Exception as e:
|
||||
# Don't let event logging break the main application
|
||||
logger.error(f"Failed to log system event {event_type}: {e}")
|
||||
|
|
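log_system_event above trims history by reading the id at the max_events position (rows are ordered newest first) and deleting everything with a smaller id. The same cutoff arithmetic on a plain list, as a sketch that assumes ids grow with time, as with an auto-incrementing primary key:

# Sketch: keep only the newest events; ids listed newest-first, mirroring
# SystemEvent's ordering = ['-timestamp'].
event_ids = [110, 109, 108, 107, 106, 105]   # newest ... oldest
max_events = 4

if len(event_ids) > max_events:
    cutoff_id = event_ids[max_events]                    # id at the cutoff position
    event_ids = [i for i in event_ids if i >= cutoff_id]  # id__lt=cutoff is deleted

print(event_ids)  # [110, 109, 108, 107, 106]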
|
|||
|
|
@ -1,5 +1,6 @@
|
|||
# core/views.py
|
||||
import os
|
||||
from shlex import split as shlex_split
|
||||
import sys
|
||||
import subprocess
|
||||
import logging
|
||||
|
|
@ -37,7 +38,9 @@ def stream_view(request, channel_uuid):
|
|||
"""
|
||||
try:
|
||||
redis_host = getattr(settings, "REDIS_HOST", "localhost")
|
||||
redis_client = redis.Redis(host=settings.REDIS_HOST, port=6379, db=int(getattr(settings, "REDIS_DB", "0")))
|
||||
redis_port = int(getattr(settings, "REDIS_PORT", 6379))
|
||||
redis_db = int(getattr(settings, "REDIS_DB", "0"))
|
||||
redis_client = redis.Redis(host=redis_host, port=redis_port, db=redis_db)
|
||||
|
||||
# Retrieve the channel by the provided stream_id.
|
||||
channel = Channel.objects.get(uuid=channel_uuid)
|
||||
|
|
@ -129,7 +132,7 @@ def stream_view(request, channel_uuid):
|
|||
stream_profile = channel.stream_profile
|
||||
if not stream_profile:
|
||||
logger.error("No stream profile set for channel ID=%s, using default", channel.id)
|
||||
stream_profile = StreamProfile.objects.get(id=CoreSettings.objects.get(key="default-stream-profile").value)
|
||||
stream_profile = StreamProfile.objects.get(id=CoreSettings.get_default_stream_profile_id())
|
||||
|
||||
logger.debug("Stream profile used: %s", stream_profile.name)
|
||||
|
||||
|
|
@ -142,7 +145,7 @@ def stream_view(request, channel_uuid):
|
|||
logger.debug("Formatted parameters: %s", parameters)
|
||||
|
||||
# Build the final command.
|
||||
cmd = [stream_profile.command] + parameters.split()
|
||||
cmd = [stream_profile.command] + shlex_split(parameters)
|
||||
logger.debug("Executing command: %s", cmd)
|
||||
|
||||
try:
|
||||
|
|
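Both the stream-profile command builder and stream_view now split parameters with shlex instead of str.split, so quoted arguments containing spaces survive as single tokens. A quick runnable comparison (the parameter string here is an example, not a shipped profile):

from shlex import split as shlex_split

params = '-i {streamUrl} -metadata title="My Channel" -f mpegts pipe:1'

print(params.split())
# ['-i', '{streamUrl}', '-metadata', 'title="My', 'Channel"', '-f', 'mpegts', 'pipe:1']
print(shlex_split(params))
# ['-i', '{streamUrl}', '-metadata', 'title=My Channel', '-f', 'mpegts', 'pipe:1']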
|
|||
|
|
@@ -73,8 +73,12 @@ class PersistentLock:

# Example usage (for testing purposes only):
if __name__ == "__main__":
    # Connect to Redis on localhost; adjust connection parameters as needed.
    client = redis.Redis(host="localhost", port=6379, db=0)
    import os
    # Connect to Redis using environment variables; adjust connection parameters as needed.
    redis_host = os.environ.get("REDIS_HOST", "localhost")
    redis_port = int(os.environ.get("REDIS_PORT", 6379))
    redis_db = int(os.environ.get("REDIS_DB", 0))
    client = redis.Redis(host=redis_host, port=redis_port, db=redis_db)
    lock = PersistentLock(client, "lock:example_account", lock_timeout=120)

    if lock.acquire():
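The body of PersistentLock is not part of this hunk. Purely for orientation, a minimal Redis lock with the same shape (acquire with SET NX EX, release only if the stored token still matches) might look like the sketch below; the project's real class may differ.

import uuid
import redis

class SimpleRedisLock:
    """Illustrative stand-in, not the project's PersistentLock implementation."""

    def __init__(self, client: redis.Redis, key: str, lock_timeout: int = 120):
        self.client = client
        self.key = key
        self.lock_timeout = lock_timeout
        self.token = str(uuid.uuid4())

    def acquire(self) -> bool:
        # SET key token NX EX timeout: succeeds only if the key does not exist yet.
        return bool(self.client.set(self.key, self.token, nx=True, ex=self.lock_timeout))

    def release(self) -> None:
        # Release only if we still own the lock (token unchanged).
        if self.client.get(self.key) == self.token.encode():
            self.client.delete(self.key)

A production-grade release would make the get/delete pair atomic (for example with a small Lua script); the sketch keeps it simple to show the idea.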
@@ -4,8 +4,9 @@ from datetime import timedelta

BASE_DIR = Path(__file__).resolve().parent.parent

SECRET_KEY = "REPLACE_ME_WITH_A_REAL_SECRET"
SECRET_KEY = os.environ.get("DJANGO_SECRET_KEY")
REDIS_HOST = os.environ.get("REDIS_HOST", "localhost")
REDIS_PORT = int(os.environ.get("REDIS_PORT", 6379))
REDIS_DB = os.environ.get("REDIS_DB", "0")

# Set DEBUG to True for development, False for production

@@ -118,7 +119,7 @@ CHANNEL_LAYERS = {
    "default": {
        "BACKEND": "channels_redis.core.RedisChannelLayer",
        "CONFIG": {
            "hosts": [(REDIS_HOST, 6379, REDIS_DB)],  # Ensure Redis is running
            "hosts": [(REDIS_HOST, REDIS_PORT, REDIS_DB)],  # Ensure Redis is running
        },
    },
}

@@ -184,8 +185,10 @@ STATICFILES_DIRS = [
DEFAULT_AUTO_FIELD = "django.db.models.BigAutoField"
AUTH_USER_MODEL = "accounts.User"

CELERY_BROKER_URL = os.environ.get("CELERY_BROKER_URL", "redis://localhost:6379/0")
CELERY_RESULT_BACKEND = CELERY_BROKER_URL
# Build default Redis URL from components for Celery
_default_redis_url = f"redis://{REDIS_HOST}:{REDIS_PORT}/{REDIS_DB}"
CELERY_BROKER_URL = os.environ.get("CELERY_BROKER_URL", _default_redis_url)
CELERY_RESULT_BACKEND = os.environ.get("CELERY_RESULT_BACKEND", CELERY_BROKER_URL)

# Configure Redis key prefix
CELERY_RESULT_BACKEND_TRANSPORT_OPTIONS = {

@@ -226,6 +229,13 @@ CELERY_BEAT_SCHEDULE = {
MEDIA_ROOT = BASE_DIR / "media"
MEDIA_URL = "/media/"

# Backup settings
BACKUP_ROOT = os.environ.get("BACKUP_ROOT", "/data/backups")
BACKUP_DATA_DIRS = [
    os.environ.get("LOGOS_DIR", "/data/logos"),
    os.environ.get("UPLOADS_DIR", "/data/uploads"),
    os.environ.get("PLUGINS_DIR", "/data/plugins"),
]

SERVER_IP = "127.0.0.1"

@@ -242,7 +252,7 @@ SIMPLE_JWT = {
}

# Redis connection settings
REDIS_URL = "redis://localhost:6379/0"
REDIS_URL = os.environ.get("REDIS_URL", f"redis://{REDIS_HOST}:{REDIS_PORT}/{REDIS_DB}")
REDIS_SOCKET_TIMEOUT = 60  # Socket timeout in seconds
REDIS_SOCKET_CONNECT_TIMEOUT = 5  # Connection timeout in seconds
REDIS_HEALTH_CHECK_INTERVAL = 15  # Health check every 15 seconds
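To make the precedence explicit: REDIS_HOST, REDIS_PORT and REDIS_DB compose a default URL, and an explicit CELERY_BROKER_URL or REDIS_URL from the environment still wins if set. A runnable sketch of that resolution; the sample environment values are invented.

# Invented environment for illustration.
env = {"REDIS_HOST": "redis", "REDIS_PORT": "6379"}

REDIS_HOST = env.get("REDIS_HOST", "localhost")
REDIS_PORT = int(env.get("REDIS_PORT", 6379))
REDIS_DB = env.get("REDIS_DB", "0")

_default_redis_url = f"redis://{REDIS_HOST}:{REDIS_PORT}/{REDIS_DB}"
CELERY_BROKER_URL = env.get("CELERY_BROKER_URL", _default_redis_url)
REDIS_URL = env.get("REDIS_URL", _default_redis_url)

print(CELERY_BROKER_URL)  # redis://redis:6379/0  (falls back to the composed default)
print(REDIS_URL)          # redis://redis:6379/0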
@@ -3,7 +3,7 @@ import json
import ipaddress
from django.http import JsonResponse
from django.core.exceptions import ValidationError
from core.models import CoreSettings, NETWORK_ACCESS
from core.models import CoreSettings, NETWORK_ACCESS_KEY


def json_error_response(message, status=400):

@@ -39,12 +39,15 @@ def get_client_ip(request):


def network_access_allowed(request, settings_key):
    network_access = json.loads(CoreSettings.objects.get(key=NETWORK_ACCESS).value)
    try:
        network_access = CoreSettings.objects.get(key=NETWORK_ACCESS_KEY).value
    except CoreSettings.DoesNotExist:
        network_access = {}

    cidrs = (
        network_access[settings_key].split(",")
        if settings_key in network_access
        else ["0.0.0.0/0"]
        else ["0.0.0.0/0", "::/0"]
    )

    network_allowed = False
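The CIDR strings drive an allow-list check; the rest of network_access_allowed falls outside this hunk, so the membership test below is a self-contained sketch of the idea using the ipaddress module imported above. It also shows why the new default of ["0.0.0.0/0", "::/0"] admits both IPv4 and IPv6 clients.

import ipaddress

def ip_in_cidrs(client_ip: str, cidrs: list[str]) -> bool:
    """Return True if client_ip falls inside any of the allowed CIDR blocks."""
    ip = ipaddress.ip_address(client_ip)
    for cidr in cidrs:
        network = ipaddress.ip_network(cidr.strip(), strict=False)
        # Membership only applies within one address family, so blocks of the
        # other IP version are skipped.
        if ip.version == network.version and ip in network:
            return True
    return False

print(ip_in_cidrs("192.168.1.50", ["192.168.0.0/16"]))   # True
print(ip_in_cidrs("203.0.113.7", ["192.168.0.0/16"]))    # False
print(ip_in_cidrs("::1", ["0.0.0.0/0", "::/0"]))         # True: the permissive default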
@@ -4,26 +4,44 @@ ENV DEBIAN_FRONTEND=noninteractive
ENV VIRTUAL_ENV=/dispatcharrpy
ENV PATH="$VIRTUAL_ENV/bin:$PATH"

# --- Install Python 3.13 and system dependencies ---
# --- Install Python 3.13 and build dependencies ---
# Note: Hardware acceleration (VA-API, VDPAU, NVENC) already included in base ffmpeg image
RUN apt-get update && apt-get install --no-install-recommends -y \
    ca-certificates software-properties-common gnupg2 curl wget \
    && add-apt-repository ppa:deadsnakes/ppa \
    && apt-get update \
    && apt-get install --no-install-recommends -y \
    python3.13 python3.13-dev python3.13-venv \
    python3.13 python3.13-dev python3.13-venv libpython3.13 \
    python-is-python3 python3-pip \
    libpcre3 libpcre3-dev libpq-dev procps \
    build-essential gcc pciutils \
    nginx streamlink comskip\
    && apt-get clean && rm -rf /var/lib/apt/lists/*
    libpcre3 libpcre3-dev libpq-dev procps pciutils \
    nginx streamlink comskip \
    vlc-bin vlc-plugin-base \
    build-essential gcc g++ gfortran libopenblas-dev libopenblas0 ninja-build

# --- Create Python virtual environment ---
RUN python3.13 -m venv $VIRTUAL_ENV && $VIRTUAL_ENV/bin/pip install --upgrade pip

# --- Install Python dependencies ---
COPY requirements.txt /tmp/requirements.txt
RUN $VIRTUAL_ENV/bin/pip install --no-cache-dir -r /tmp/requirements.txt && rm /tmp/requirements.txt
RUN $VIRTUAL_ENV/bin/pip install --no-cache-dir -r /tmp/requirements.txt && \
    rm /tmp/requirements.txt

# --- Build legacy NumPy wheel for old hardware (store for runtime switching) ---
RUN $VIRTUAL_ENV/bin/pip install --no-cache-dir build && \
    cd /tmp && \
    $VIRTUAL_ENV/bin/pip download --no-binary numpy --no-deps numpy && \
    tar -xzf numpy-*.tar.gz && \
    cd numpy-*/ && \
    $VIRTUAL_ENV/bin/python -m build --wheel -Csetup-args=-Dcpu-baseline="none" -Csetup-args=-Dcpu-dispatch="none" && \
    mv dist/*.whl /opt/ && \
    cd / && rm -rf /tmp/numpy-* /tmp/*.tar.gz && \
    $VIRTUAL_ENV/bin/pip uninstall -y build

# --- Clean up build dependencies to reduce image size ---
RUN apt-get remove -y build-essential gcc g++ gfortran libopenblas-dev libpcre3-dev python3.13-dev ninja-build && \
    apt-get autoremove -y --purge && \
    apt-get clean && \
    rm -rf /var/lib/apt/lists/* /root/.cache /tmp/*

# --- Set up Redis 7.x ---
RUN curl -fsSL https://packages.redis.io/gpg | gpg --dearmor -o /usr/share/keyrings/redis-archive-keyring.gpg && \
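The wheel built with cpu-baseline and cpu-dispatch set to "none" is what the container later swaps in when USE_LEGACY_NUMPY=true. The entrypoint performs that detection with grep over numpy.show_config(); a Python-only sketch of the same check, offered as an illustration rather than the project's exact code:

import contextlib
import io

def numpy_has_cpu_baseline() -> bool:
    """Return True if the installed NumPy reports CPU baseline optimisations.

    Mirrors the entrypoint's `python -c "...show_config()" | grep -qi baseline`
    check; treat this as a sketch, not the project's detection code.
    """
    import numpy
    buf = io.StringIO()
    with contextlib.redirect_stdout(buf):
        numpy.show_config()
    return "baseline" in buf.getvalue().lower()

if __name__ == "__main__":
    print("baseline build" if numpy_has_cpu_baseline() else "legacy (no baseline) build")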
@@ -35,9 +35,6 @@ RUN rm -rf /app/frontend
# Copy built frontend assets
COPY --from=frontend-builder /app/frontend/dist /app/frontend/dist

# Run Django collectstatic
RUN python manage.py collectstatic --noinput

# Add timestamp argument
ARG TIMESTAMP
@@ -1,11 +1,65 @@
#!/bin/bash
docker build --build-arg BRANCH=dev -t dispatcharr/dispatcharr:dev -f Dockerfile ..
#!/bin/bash
set -e

# Default values
VERSION=$(python3 -c "import sys; sys.path.append('..'); import version; print(version.__version__)")
REGISTRY="dispatcharr"  # Registry or private repo to push to
IMAGE="dispatcharr"     # Image that we're building
BRANCH="dev"
ARCH=""                 # Architectures to build for, e.g. linux/amd64,linux/arm64
PUSH=false

usage() {
    cat <<- EOF
        To test locally:
            ./build-dev.sh

        To build and push to registry:
            ./build-dev.sh -p

        To build and push to a private registry:
            ./build-dev.sh -p -r myregistry:5000

        To build for -both- x86_64 and arm_64:
            ./build-dev.sh -p -a linux/amd64,linux/arm64

        Do it all:
            ./build-dev.sh -p -r myregistry:5000 -a linux/amd64,linux/arm64
    EOF
    exit 0
}

# Parse options
while getopts "pr:a:b:i:h" opt; do
    case $opt in
        r) REGISTRY="$OPTARG" ;;
        a) ARCH="--platform $OPTARG" ;;
        b) BRANCH="$OPTARG" ;;
        i) IMAGE="$OPTARG" ;;
        p) PUSH=true ;;
        h) usage ;;
        \?) echo "Invalid option: -$OPTARG" >&2; exit 1 ;;
    esac
done

BUILD_ARGS="BRANCH=$BRANCH"

echo docker build --build-arg $BUILD_ARGS $ARCH -t $IMAGE
docker build -f Dockerfile --build-arg $BUILD_ARGS $ARCH -t $IMAGE ..
docker tag $IMAGE $IMAGE:$BRANCH
docker tag $IMAGE $IMAGE:$VERSION

if [ -z "$PUSH" ]; then
    echo "Please run 'docker push -t $IMAGE:dev -t $IMAGE:${VERSION}' when ready"
else
    for TAG in latest "$VERSION" "$BRANCH"; do
        docker tag "$IMAGE" "$REGISTRY/$IMAGE:$TAG"
        docker push -q "$REGISTRY/$IMAGE:$TAG"
    done
    echo "Images pushed successfully."
fi


# Get version information
VERSION=$(python -c "import sys; sys.path.append('..'); import version; print(version.__version__)")

# Build with version tag
docker build --build-arg BRANCH=dev \
    -t dispatcharr/dispatcharr:dev \
    -t dispatcharr/dispatcharr:${VERSION} \
    -f Dockerfile ..
@@ -14,6 +14,10 @@ services:
      - REDIS_HOST=localhost
      - CELERY_BROKER_URL=redis://localhost:6379/0
      - DISPATCHARR_LOG_LEVEL=info
      # Legacy CPU Support (Optional)
      # Uncomment to enable legacy NumPy build for older CPUs (circa 2009)
      # that lack support for newer baseline CPU features
      #- USE_LEGACY_NUMPY=true
      # Process Priority Configuration (Optional)
      # Lower values = higher priority. Range: -20 (highest) to 19 (lowest)
      # Negative values require cap_add: SYS_NICE (uncomment below)

@@ -18,6 +18,10 @@ services:
      - REDIS_HOST=localhost
      - CELERY_BROKER_URL=redis://localhost:6379/0
      - DISPATCHARR_LOG_LEVEL=trace
      # Legacy CPU Support (Optional)
      # Uncomment to enable legacy NumPy build for older CPUs (circa 2009)
      # that lack support for newer baseline CPU features
      #- USE_LEGACY_NUMPY=true
      # Process Priority Configuration (Optional)
      # Lower values = higher priority. Range: -20 (highest) to 19 (lowest)
      # Negative values require cap_add: SYS_NICE (uncomment below)

@@ -17,6 +17,10 @@ services:
      - REDIS_HOST=localhost
      - CELERY_BROKER_URL=redis://localhost:6379/0
      - DISPATCHARR_LOG_LEVEL=debug
      # Legacy CPU Support (Optional)
      # Uncomment to enable legacy NumPy build for older CPUs (circa 2009)
      # that lack support for newer baseline CPU features
      #- USE_LEGACY_NUMPY=true
      # Process Priority Configuration (Optional)
      # Lower values = higher priority. Range: -20 (highest) to 19 (lowest)
      # Negative values require cap_add: SYS_NICE (uncomment below)

@@ -17,6 +17,10 @@ services:
      - REDIS_HOST=redis
      - CELERY_BROKER_URL=redis://redis:6379/0
      - DISPATCHARR_LOG_LEVEL=info
      # Legacy CPU Support (Optional)
      # Uncomment to enable legacy NumPy build for older CPUs (circa 2009)
      # that lack support for newer baseline CPU features
      #- USE_LEGACY_NUMPY=true
      # Process Priority Configuration (Optional)
      # Lower values = higher priority. Range: -20 (highest) to 19 (lowest)
      # Negative values require cap_add: SYS_NICE (uncomment below)
@@ -27,6 +27,18 @@ echo_with_timestamp() {
    echo "$(date '+%Y-%m-%d %H:%M:%S') - $1"
}

# --- NumPy version switching for legacy hardware ---
if [ "$USE_LEGACY_NUMPY" = "true" ]; then
    # Check if NumPy was compiled with baseline support
    if /dispatcharrpy/bin/python -c "import numpy; numpy.show_config()" 2>&1 | grep -qi "baseline"; then
        echo_with_timestamp "🔧 Switching to legacy NumPy (no CPU baseline)..."
        /dispatcharrpy/bin/pip install --no-cache-dir --force-reinstall --no-deps /opt/numpy-*.whl
        echo_with_timestamp "✅ Legacy NumPy installed"
    else
        echo_with_timestamp "✅ Legacy NumPy (no baseline) already installed, skipping reinstallation"
    fi
fi

# Set PostgreSQL environment variables
export POSTGRES_DB=${POSTGRES_DB:-dispatcharr}
export POSTGRES_USER=${POSTGRES_USER:-dispatch}

@@ -40,6 +52,21 @@ export REDIS_DB=${REDIS_DB:-0}
export DISPATCHARR_PORT=${DISPATCHARR_PORT:-9191}
export LIBVA_DRIVERS_PATH='/usr/local/lib/x86_64-linux-gnu/dri'
export LD_LIBRARY_PATH='/usr/local/lib'
export SECRET_FILE="/data/jwt"
# Ensure Django secret key exists or generate a new one
if [ ! -f "$SECRET_FILE" ]; then
    echo "Generating new Django secret key..."
    old_umask=$(umask)
    umask 077
    tmpfile="$(mktemp "${SECRET_FILE}.XXXXXX")" || { echo "mktemp failed"; exit 1; }
    python3 - <<'PY' >"$tmpfile" || { echo "secret generation failed"; rm -f "$tmpfile"; exit 1; }
import secrets
print(secrets.token_urlsafe(64))
PY
    mv -f "$tmpfile" "$SECRET_FILE" || { echo "move failed"; rm -f "$tmpfile"; exit 1; }
    umask $old_umask
fi
export DJANGO_SECRET_KEY="$(cat "$SECRET_FILE")"

# Process priority configuration
# UWSGI_NICE_LEVEL: Absolute nice value for uWSGI/streaming (default: 0 = normal priority)

@@ -85,12 +112,12 @@ export POSTGRES_DIR=/data/db
if [[ ! -f /etc/profile.d/dispatcharr.sh ]]; then
    # Define all variables to process
    variables=(
        PATH VIRTUAL_ENV DJANGO_SETTINGS_MODULE PYTHONUNBUFFERED
        PATH VIRTUAL_ENV DJANGO_SETTINGS_MODULE PYTHONUNBUFFERED PYTHONDONTWRITEBYTECODE
        POSTGRES_DB POSTGRES_USER POSTGRES_PASSWORD POSTGRES_HOST POSTGRES_PORT
        DISPATCHARR_ENV DISPATCHARR_DEBUG DISPATCHARR_LOG_LEVEL
        REDIS_HOST REDIS_DB POSTGRES_DIR DISPATCHARR_PORT
        DISPATCHARR_VERSION DISPATCHARR_TIMESTAMP LIBVA_DRIVERS_PATH LIBVA_DRIVER_NAME LD_LIBRARY_PATH
        CELERY_NICE_LEVEL UWSGI_NICE_LEVEL
        CELERY_NICE_LEVEL UWSGI_NICE_LEVEL DJANGO_SECRET_KEY
    )

    # Process each variable for both profile.d and environment

@@ -159,9 +186,9 @@ else
    pids+=("$nginx_pid")
fi

cd /app
python manage.py migrate --noinput
python manage.py collectstatic --noinput
# Run Django commands as non-root user to prevent permission issues
su - $POSTGRES_USER -c "cd /app && python manage.py migrate --noinput"
su - $POSTGRES_USER -c "cd /app && python manage.py collectstatic --noinput"

# Select proper uwsgi config based on environment
if [ "$DISPATCHARR_ENV" = "dev" ] && [ "$DISPATCHARR_DEBUG" != "true" ]; then

@@ -187,7 +214,7 @@ fi
# Users can override via UWSGI_NICE_LEVEL environment variable in docker-compose
# Start with nice as root, then use setpriv to drop privileges to dispatch user
# This preserves both the nice value and environment variables
nice -n $UWSGI_NICE_LEVEL su -p - "$POSTGRES_USER" -c "cd /app && exec uwsgi $uwsgi_args" & uwsgi_pid=$!
nice -n $UWSGI_NICE_LEVEL su - "$POSTGRES_USER" -c "cd /app && exec /dispatcharrpy/bin/uwsgi $uwsgi_args" & uwsgi_pid=$!
echo "✅ uwsgi started with PID $uwsgi_pid (nice $UWSGI_NICE_LEVEL)"
pids+=("$uwsgi_pid")
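The secret-key bootstrap above (umask 077, mktemp, then an atomic mv) can also be expressed in pure Python. A sketch under the assumption that /data/jwt is the target path, as in the script; this is an illustration of the pattern, not code from the repository.

import os
import secrets
import tempfile

SECRET_FILE = "/data/jwt"  # same path the entrypoint uses

def ensure_secret_key(path: str = SECRET_FILE) -> str:
    """Create the secret file once with owner-only permissions, then reuse it on every start."""
    if not os.path.exists(path):
        # mkstemp creates the temporary file with mode 0600, like `umask 077` + mktemp.
        fd, tmp_path = tempfile.mkstemp(dir=os.path.dirname(path) or ".")
        with os.fdopen(fd, "w") as fh:
            fh.write(secrets.token_urlsafe(64))
        os.replace(tmp_path, path)  # atomic rename, the equivalent of `mv -f`
    with open(path) as fh:
        return fh.read().strip()

os.environ["DJANGO_SECRET_KEY"] = ensure_secret_key()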
@@ -15,6 +15,7 @@ DATA_DIRS=(
APP_DIRS=(
    "/app/logo_cache"
    "/app/media"
    "/app/static"
)

# Create all directories

@@ -29,9 +30,21 @@ if [ "$(id -u)" = "0" ] && [ -d "/app" ]; then
        chown $PUID:$PGID /app
    fi
fi

# Configure nginx port
if ! [[ "$DISPATCHARR_PORT" =~ ^[0-9]+$ ]]; then
    echo "⚠️ Warning: DISPATCHARR_PORT is not a valid integer, using default port 9191"
    DISPATCHARR_PORT=9191
fi
sed -i "s/NGINX_PORT/${DISPATCHARR_PORT}/g" /etc/nginx/sites-enabled/default

# Configure nginx based on IPv6 availability
if ip -6 addr show | grep -q "inet6"; then
    echo "✅ IPv6 is available, enabling IPv6 in nginx"
else
    echo "⚠️ IPv6 not available, disabling IPv6 in nginx"
    sed -i '/listen \[::\]:/d' /etc/nginx/sites-enabled/default
fi

# NOTE: mac doesn't run as root, so only manage permissions
# if this script is running as root
if [ "$(id -u)" = "0" ]; then
@@ -3,6 +3,7 @@ proxy_cache_path /app/logo_cache levels=1:2 keys_zone=logo_cache:10m

server {
    listen NGINX_PORT;
    listen [::]:NGINX_PORT;

    proxy_connect_timeout 75;
    proxy_send_timeout 300;

@@ -34,6 +35,13 @@ server {
        root /data;
    }

    # Internal location for X-Accel-Redirect backup downloads
    # Django handles auth, nginx serves the file directly
    location /protected-backups/ {
        internal;
        alias /data/backups/;
    }

    location /api/logos/(?<logo_id>\d+)/cache/ {
        proxy_pass http://127.0.0.1:5656;
        proxy_cache logo_cache;
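The internal /protected-backups/ location is the nginx half of an X-Accel-Redirect handoff: Django authenticates the request and then answers with an X-Accel-Redirect header instead of streaming the file itself. A hedged Django view sketch of that handoff; the view name, URL routing and auth check are illustrative, only the /protected-backups/ prefix and the auth-in-Django, serve-in-nginx split come from the config above.

# Hypothetical Django view showing the application side of the nginx config.
import os
from django.http import HttpResponse, HttpResponseForbidden

def download_backup(request, filename):
    if not request.user.is_authenticated:      # the real code may use a signed token instead
        return HttpResponseForbidden()

    safe_name = os.path.basename(filename)     # avoid path traversal
    response = HttpResponse()
    # nginx intercepts this header and serves /data/backups/<safe_name> from the
    # `internal` location itself, so the large file never passes through Django.
    response["X-Accel-Redirect"] = f"/protected-backups/{safe_name}"
    response["Content-Disposition"] = f'attachment; filename="{safe_name}"'
    return response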
@@ -20,7 +20,6 @@ module = scripts.debug_wrapper:application
virtualenv = /dispatcharrpy
master = true
env = DJANGO_SETTINGS_MODULE=dispatcharr.settings

socket = /app/uwsgi.sock
chmod-socket = 777
vacuum = true

@@ -21,6 +21,7 @@ module = dispatcharr.wsgi:application
virtualenv = /dispatcharrpy
master = true
env = DJANGO_SETTINGS_MODULE=dispatcharr.settings
env = USE_NGINX_ACCEL=true
socket = /app/uwsgi.sock
chmod-socket = 777
vacuum = true

@@ -36,6 +37,7 @@ http-keepalive = 1
buffer-size = 65536 # Increase buffer for large payloads
post-buffering = 4096 # Reduce buffering for real-time streaming
http-timeout = 600 # Prevent disconnects from long streams
socket-timeout = 600 # Prevent write timeouts when client buffers
lazy-apps = true # Improve memory efficiency

# Async mode (use gevent for high concurrency)

@@ -57,4 +59,4 @@ logformat-strftime = true
log-date = %%Y-%%m-%%d %%H:%%M:%%S,000
# Use formatted time with environment variable for log level
log-format = %(ftime) $(DISPATCHARR_LOG_LEVEL) uwsgi.requests Worker ID: %(wid) %(method) %(status) %(uri) %(msecs)ms
log-buffering = 1024 # Add buffer size limit for logging
log-buffering = 1024 # Add buffer size limit for logging
@@ -36,7 +36,7 @@
    "model": "core.streamprofile",
    "pk": 1,
    "fields": {
        "profile_name": "ffmpeg",
        "profile_name": "FFmpeg",
        "command": "ffmpeg",
        "parameters": "-i {streamUrl} -c:a copy -c:v copy -f mpegts pipe:1",
        "is_active": true,

@@ -46,13 +46,23 @@
{
    "model": "core.streamprofile",
    "fields": {
        "profile_name": "streamlink",
        "profile_name": "Streamlink",
        "command": "streamlink",
        "parameters": "{streamUrl} best --stdout",
        "is_active": true,
        "user_agent": "1"
    }
},
{
    "model": "core.streamprofile",
    "fields": {
        "profile_name": "VLC",
        "command": "cvlc",
        "parameters": "-vv -I dummy --no-video-title-show --http-user-agent {userAgent} {streamUrl} --sout #standard{access=file,mux=ts,dst=-}",
        "is_active": true,
        "user_agent": "1"
    }
},
{
    "model": "core.coresettings",
    "fields": {
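These fixtures only store a parameter template; at stream time the placeholders are filled in and the result is split into an argv list (the stream_view hunk earlier shows shlex_split being used for exactly that). A small sketch of one way such a template could be expanded, with made-up URL and user agent; whether the project substitutes placeholders this way is not shown in this diff.

from shlex import split as shlex_split

profile = {
    "command": "streamlink",
    "parameters": "{streamUrl} best --stdout",
}

# Illustrative values; the real ones come from the channel and user-agent settings.
params = (
    profile["parameters"]
    .replace("{streamUrl}", "http://example.com/live/stream.m3u8")
    .replace("{userAgent}", "Dispatcharr/1.0")
)
cmd = [profile["command"]] + shlex_split(params)
print(cmd)  # ['streamlink', 'http://example.com/live/stream.m3u8', 'best', '--stdout']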
frontend/package-lock.json (generated, 1395 lines changed): diff suppressed because it is too large.
|
|
@ -23,11 +23,12 @@
|
|||
"@mantine/form": "~8.0.1",
|
||||
"@mantine/hooks": "~8.0.1",
|
||||
"@mantine/notifications": "~8.0.1",
|
||||
"@hookform/resolvers": "^5.2.2",
|
||||
"@tanstack/react-table": "^8.21.2",
|
||||
"allotment": "^1.20.4",
|
||||
"dayjs": "^1.11.13",
|
||||
"formik": "^2.4.6",
|
||||
"hls.js": "^1.5.20",
|
||||
"react-hook-form": "^7.70.0",
|
||||
"lucide-react": "^0.511.0",
|
||||
"mpegts.js": "^1.8.0",
|
||||
"react": "^19.1.0",
|
||||
|
|
@ -54,18 +55,21 @@
|
|||
"@types/react": "^19.1.0",
|
||||
"@types/react-dom": "^19.1.0",
|
||||
"@vitejs/plugin-react-swc": "^4.1.0",
|
||||
"eslint": "^9.21.0",
|
||||
"eslint": "^9.27.0",
|
||||
"eslint-plugin-react-hooks": "^5.1.0",
|
||||
"eslint-plugin-react-refresh": "^0.4.19",
|
||||
"globals": "^15.15.0",
|
||||
"jsdom": "^27.0.0",
|
||||
"prettier": "^3.5.3",
|
||||
"vite": "^6.2.0",
|
||||
"vite": "^7.1.7",
|
||||
"vitest": "^3.2.4"
|
||||
},
|
||||
"resolutions": {
|
||||
"vite": "7.1.7",
|
||||
"react": "19.1.0",
|
||||
"react-dom": "19.1.0"
|
||||
},
|
||||
"overrides": {
|
||||
"js-yaml": "^4.1.1"
|
||||
}
|
||||
}
|
||||
|
|
|
|||
|
|
@ -19,7 +19,6 @@ import Users from './pages/Users';
|
|||
import LogosPage from './pages/Logos';
|
||||
import VODsPage from './pages/VODs';
|
||||
import useAuthStore from './store/auth';
|
||||
import useLogosStore from './store/logos';
|
||||
import FloatingVideo from './components/FloatingVideo';
|
||||
import { WebsocketProvider } from './WebSocket';
|
||||
import { Box, AppShell, MantineProvider } from '@mantine/core';
|
||||
|
|
@ -40,8 +39,6 @@ const defaultRoute = '/channels';
|
|||
|
||||
const App = () => {
|
||||
const [open, setOpen] = useState(true);
|
||||
const [backgroundLoadingStarted, setBackgroundLoadingStarted] =
|
||||
useState(false);
|
||||
const isAuthenticated = useAuthStore((s) => s.isAuthenticated);
|
||||
const setIsAuthenticated = useAuthStore((s) => s.setIsAuthenticated);
|
||||
const logout = useAuthStore((s) => s.logout);
|
||||
|
|
@ -81,11 +78,7 @@ const App = () => {
|
|||
const loggedIn = await initializeAuth();
|
||||
if (loggedIn) {
|
||||
await initData();
|
||||
// Start background logo loading after app is fully initialized (only once)
|
||||
if (!backgroundLoadingStarted) {
|
||||
setBackgroundLoadingStarted(true);
|
||||
useLogosStore.getState().startBackgroundLoading();
|
||||
}
|
||||
// Logos are now loaded at the end of initData, no need for background loading
|
||||
} else {
|
||||
await logout();
|
||||
}
|
||||
|
|
@ -96,7 +89,7 @@ const App = () => {
|
|||
};
|
||||
|
||||
checkAuth();
|
||||
}, [initializeAuth, initData, logout, backgroundLoadingStarted]);
|
||||
}, [initializeAuth, initData, logout]);
|
||||
|
||||
return (
|
||||
<MantineProvider
|
||||
|
|
|
|||
|
|
@ -569,15 +569,25 @@ export const WebsocketProvider = ({ children }) => {
|
|||
break;
|
||||
|
||||
case 'epg_refresh':
|
||||
// Update the store with progress information
|
||||
updateEPGProgress(parsedEvent.data);
|
||||
|
||||
// If we have source/account info, update the EPG source status
|
||||
// If we have source/account info, check if EPG exists before processing
|
||||
if (parsedEvent.data.source || parsedEvent.data.account) {
|
||||
const sourceId =
|
||||
parsedEvent.data.source || parsedEvent.data.account;
|
||||
const epg = epgs[sourceId];
|
||||
|
||||
// Only update progress if the EPG still exists in the store
|
||||
// This prevents crashes when receiving updates for deleted EPGs
|
||||
if (epg) {
|
||||
// Update the store with progress information
|
||||
updateEPGProgress(parsedEvent.data);
|
||||
} else {
|
||||
// EPG was deleted, ignore this update
|
||||
console.debug(
|
||||
`Ignoring EPG refresh update for deleted EPG ${sourceId}`
|
||||
);
|
||||
break;
|
||||
}
|
||||
|
||||
if (epg) {
|
||||
// Check for any indication of an error (either via status or error field)
|
||||
const hasError =
|
||||
|
|
@ -613,6 +623,10 @@ export const WebsocketProvider = ({ children }) => {
|
|||
status: parsedEvent.data.status || 'success',
|
||||
last_message:
|
||||
parsedEvent.data.message || epg.last_message,
|
||||
// Use the timestamp from the backend if provided
|
||||
...(parsedEvent.data.updated_at && {
|
||||
updated_at: parsedEvent.data.updated_at,
|
||||
}),
|
||||
});
|
||||
|
||||
// Only show success notification if we've finished parsing programs and had no errors
|
||||
|
|
@ -742,6 +756,7 @@ export const WebsocketProvider = ({ children }) => {
|
|||
try {
|
||||
await API.requeryChannels();
|
||||
await useChannelsStore.getState().fetchChannels();
|
||||
await fetchChannelProfiles();
|
||||
console.log('Channels refreshed after bulk creation');
|
||||
} catch (error) {
|
||||
console.error(
|
||||
|
|
|
|||
|
|
@ -170,7 +170,7 @@ export default class API {
|
|||
|
||||
static async logout() {
|
||||
return await request(`${host}/api/accounts/auth/logout/`, {
|
||||
auth: false,
|
||||
auth: true, // Send JWT token so backend can identify the user
|
||||
method: 'POST',
|
||||
});
|
||||
}
|
||||
|
|
@ -336,6 +336,15 @@ export default class API {
|
|||
delete channelData.channel_number;
|
||||
}
|
||||
|
||||
// Add channel profile IDs based on current selection
|
||||
const selectedProfileId = useChannelsStore.getState().selectedProfileId;
|
||||
if (selectedProfileId && selectedProfileId !== '0') {
|
||||
// Specific profile selected - add only to that profile
|
||||
channelData.channel_profile_ids = [parseInt(selectedProfileId)];
|
||||
}
|
||||
// If selectedProfileId is '0' or not set, don't include channel_profile_ids
|
||||
// which will trigger the backend's default behavior of adding to all profiles
|
||||
|
||||
if (channel.logo_file) {
|
||||
// Must send FormData for file upload
|
||||
body = new FormData();
|
||||
|
|
@ -1053,8 +1062,20 @@ export default class API {
|
|||
}
|
||||
|
||||
static async updateEPG(values, isToggle = false) {
|
||||
// Validate that values is an object
|
||||
if (!values || typeof values !== 'object') {
|
||||
console.error('updateEPG called with invalid values:', values);
|
||||
return;
|
||||
}
|
||||
|
||||
const { id, ...payload } = values;
|
||||
|
||||
// Validate that we have an ID and payload is an object
|
||||
if (!id || typeof payload !== 'object') {
|
||||
console.error('updateEPG: invalid id or payload', { id, payload });
|
||||
return;
|
||||
}
|
||||
|
||||
try {
|
||||
// If this is just toggling the active state, make a simpler request
|
||||
if (
|
||||
|
|
@ -1337,6 +1358,183 @@ export default class API {
|
|||
}
|
||||
}
|
||||
|
||||
// Backup API (async with Celery task polling)
|
||||
static async listBackups() {
|
||||
try {
|
||||
const response = await request(`${host}/api/backups/`);
|
||||
return response || [];
|
||||
} catch (e) {
|
||||
errorNotification('Failed to load backups', e);
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
|
||||
static async getBackupStatus(taskId, token = null) {
|
||||
try {
|
||||
let url = `${host}/api/backups/status/${taskId}/`;
|
||||
if (token) {
|
||||
url += `?token=${encodeURIComponent(token)}`;
|
||||
}
|
||||
const response = await request(url, { auth: !token });
|
||||
return response;
|
||||
} catch (e) {
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
|
||||
static async waitForBackupTask(taskId, onProgress, token = null) {
|
||||
const pollInterval = 2000; // Poll every 2 seconds
|
||||
const maxAttempts = 300; // Max 10 minutes (300 * 2s)
|
||||
|
||||
for (let attempt = 0; attempt < maxAttempts; attempt++) {
|
||||
try {
|
||||
const status = await API.getBackupStatus(taskId, token);
|
||||
|
||||
if (onProgress) {
|
||||
onProgress(status);
|
||||
}
|
||||
|
||||
if (status.state === 'completed') {
|
||||
return status.result;
|
||||
} else if (status.state === 'failed') {
|
||||
throw new Error(status.error || 'Task failed');
|
||||
}
|
||||
} catch (e) {
|
||||
throw e;
|
||||
}
|
||||
|
||||
// Wait before next poll
|
||||
await new Promise((resolve) => setTimeout(resolve, pollInterval));
|
||||
}
|
||||
|
||||
throw new Error('Task timed out');
|
||||
}
|
||||
|
||||
static async createBackup(onProgress) {
|
||||
try {
|
||||
// Start the backup task
|
||||
const response = await request(`${host}/api/backups/create/`, {
|
||||
method: 'POST',
|
||||
});
|
||||
|
||||
// Wait for the task to complete using token for auth
|
||||
const result = await API.waitForBackupTask(response.task_id, onProgress, response.task_token);
|
||||
return result;
|
||||
} catch (e) {
|
||||
errorNotification('Failed to create backup', e);
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
|
||||
static async uploadBackup(file) {
|
||||
try {
|
||||
const formData = new FormData();
|
||||
formData.append('file', file);
|
||||
|
||||
const response = await request(
|
||||
`${host}/api/backups/upload/`,
|
||||
{
|
||||
method: 'POST',
|
||||
body: formData,
|
||||
}
|
||||
);
|
||||
return response;
|
||||
} catch (e) {
|
||||
errorNotification('Failed to upload backup', e);
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
|
||||
static async deleteBackup(filename) {
|
||||
try {
|
||||
const encodedFilename = encodeURIComponent(filename);
|
||||
await request(`${host}/api/backups/${encodedFilename}/delete/`, {
|
||||
method: 'DELETE',
|
||||
});
|
||||
} catch (e) {
|
||||
errorNotification('Failed to delete backup', e);
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
|
||||
static async getDownloadToken(filename) {
|
||||
// Get a download token from the server
|
||||
try {
|
||||
const response = await request(`${host}/api/backups/${encodeURIComponent(filename)}/download-token/`);
|
||||
return response.token;
|
||||
} catch (e) {
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
|
||||
static async downloadBackup(filename) {
|
||||
try {
|
||||
// Get a download token first (requires auth)
|
||||
const token = await API.getDownloadToken(filename);
|
||||
const encodedFilename = encodeURIComponent(filename);
|
||||
|
||||
// Build the download URL with token
|
||||
const downloadUrl = `${host}/api/backups/${encodedFilename}/download/?token=${encodeURIComponent(token)}`;
|
||||
|
||||
// Use direct browser navigation instead of fetch to avoid CORS issues
|
||||
const link = document.createElement('a');
|
||||
link.href = downloadUrl;
|
||||
link.download = filename;
|
||||
document.body.appendChild(link);
|
||||
link.click();
|
||||
document.body.removeChild(link);
|
||||
|
||||
return { filename };
|
||||
} catch (e) {
|
||||
errorNotification('Failed to download backup', e);
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
|
||||
static async restoreBackup(filename, onProgress) {
|
||||
try {
|
||||
// Start the restore task
|
||||
const encodedFilename = encodeURIComponent(filename);
|
||||
const response = await request(
|
||||
`${host}/api/backups/${encodedFilename}/restore/`,
|
||||
{
|
||||
method: 'POST',
|
||||
}
|
||||
);
|
||||
|
||||
// Wait for the task to complete using token for auth
|
||||
// Token-based auth allows status polling even after DB restore invalidates user sessions
|
||||
const result = await API.waitForBackupTask(response.task_id, onProgress, response.task_token);
|
||||
return result;
|
||||
} catch (e) {
|
||||
errorNotification('Failed to restore backup', e);
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
|
||||
static async getBackupSchedule() {
|
||||
try {
|
||||
const response = await request(`${host}/api/backups/schedule/`);
|
||||
return response;
|
||||
} catch (e) {
|
||||
errorNotification('Failed to get backup schedule', e);
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
|
||||
static async updateBackupSchedule(settings) {
|
||||
try {
|
||||
const response = await request(`${host}/api/backups/schedule/update/`, {
|
||||
method: 'PUT',
|
||||
body: settings,
|
||||
});
|
||||
return response;
|
||||
} catch (e) {
|
||||
errorNotification('Failed to update backup schedule', e);
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
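The backup endpoints above follow a start-task-then-poll pattern: the create/restore call returns a task_id plus a task_token, and the status endpoint accepts that token as a query parameter, so polling keeps working even when a restore invalidates the user's session. A hedged Python client sketch of the same flow; the base URL and JWT handling are assumptions, while the paths, the token query parameter and the completed/failed states are taken from the code above.

import time
import requests

BASE_URL = "http://localhost:9191"            # assumed Dispatcharr address
HEADERS = {"Authorization": "Bearer <jwt>"}   # assumed auth scheme for the initial call

def create_backup_and_wait(poll_interval=2.0, timeout=600):
    # Kick off the Celery-backed backup task.
    start = requests.post(f"{BASE_URL}/api/backups/create/", headers=HEADERS).json()
    task_id, task_token = start["task_id"], start["task_token"]

    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        status = requests.get(
            f"{BASE_URL}/api/backups/status/{task_id}/",
            params={"token": task_token},  # token auth, no session needed
        ).json()
        if status.get("state") == "completed":
            return status.get("result")
        if status.get("state") == "failed":
            raise RuntimeError(status.get("error", "Task failed"))
        time.sleep(poll_interval)
    raise TimeoutError("Backup task timed out")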
|
||||
static async getVersion() {
|
||||
try {
|
||||
const response = await request(`${host}/api/core/version/`, {
|
||||
|
|
@ -1502,6 +1700,19 @@ export default class API {
|
|||
}
|
||||
}
|
||||
|
||||
static async stopVODClient(clientId) {
|
||||
try {
|
||||
const response = await request(`${host}/proxy/vod/stop_client/`, {
|
||||
method: 'POST',
|
||||
body: { client_id: clientId },
|
||||
});
|
||||
|
||||
return response;
|
||||
} catch (e) {
|
||||
errorNotification('Failed to stop VOD client', e);
|
||||
}
|
||||
}
|
||||
|
||||
static async stopChannel(id) {
|
||||
try {
|
||||
const response = await request(`${host}/proxy/ts/stop/${id}`, {
|
||||
|
|
@ -1910,6 +2121,24 @@ export default class API {
|
|||
}
|
||||
}
|
||||
|
||||
static async duplicateChannelProfile(id, name) {
|
||||
try {
|
||||
const response = await request(
|
||||
`${host}/api/channels/profiles/${id}/duplicate/`,
|
||||
{
|
||||
method: 'POST',
|
||||
body: { name },
|
||||
}
|
||||
);
|
||||
|
||||
useChannelsStore.getState().addProfile(response);
|
||||
|
||||
return response;
|
||||
} catch (e) {
|
||||
errorNotification(`Failed to duplicate channel profile ${id}`, e);
|
||||
}
|
||||
}
|
||||
|
||||
static async deleteChannelProfile(id) {
|
||||
try {
|
||||
await request(`${host}/api/channels/profiles/${id}/`, {
|
||||
|
|
@ -2119,7 +2348,8 @@ export default class API {
|
|||
|
||||
static async deleteSeriesRule(tvgId) {
|
||||
try {
|
||||
await request(`${host}/api/channels/series-rules/${tvgId}/`, { method: 'DELETE' });
|
||||
const encodedTvgId = encodeURIComponent(tvgId);
|
||||
await request(`${host}/api/channels/series-rules/${encodedTvgId}/`, { method: 'DELETE' });
|
||||
notifications.show({ title: 'Series rule removed' });
|
||||
} catch (e) {
|
||||
errorNotification('Failed to remove series rule', e);
|
||||
|
|
@ -2481,4 +2711,21 @@ export default class API {
|
|||
errorNotification('Failed to update playback position', e);
|
||||
}
|
||||
}
|
||||
|
||||
static async getSystemEvents(limit = 100, offset = 0, eventType = null) {
|
||||
try {
|
||||
const params = new URLSearchParams();
|
||||
params.append('limit', limit);
|
||||
params.append('offset', offset);
|
||||
if (eventType) {
|
||||
params.append('event_type', eventType);
|
||||
}
|
||||
const response = await request(
|
||||
`${host}/api/core/system-events/?${params.toString()}`
|
||||
);
|
||||
return response;
|
||||
} catch (e) {
|
||||
errorNotification('Failed to retrieve system events', e);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
|||
|
|
@ -16,6 +16,7 @@ import useWarningsStore from '../store/warnings';
|
|||
* @param {string} props.actionKey - Unique key for this type of action (used for suppression)
|
||||
* @param {Function} props.onSuppressChange - Called when "don't show again" option changes
|
||||
* @param {string} [props.size='md'] - Size of the modal
|
||||
* @param {boolean} [props.loading=false] - Whether the confirm button should show loading state
|
||||
*/
|
||||
const ConfirmationDialog = ({
|
||||
opened,
|
||||
|
|
@ -31,6 +32,7 @@ const ConfirmationDialog = ({
|
|||
zIndex = 1000,
|
||||
showDeleteFileOption = false,
|
||||
deleteFileLabel = 'Also delete files from disk',
|
||||
loading = false,
|
||||
}) => {
|
||||
const suppressWarning = useWarningsStore((s) => s.suppressWarning);
|
||||
const isWarningSuppressed = useWarningsStore((s) => s.isWarningSuppressed);
|
||||
|
|
@ -93,10 +95,16 @@ const ConfirmationDialog = ({
|
|||
)}
|
||||
|
||||
<Group justify="flex-end">
|
||||
<Button variant="outline" onClick={handleClose}>
|
||||
<Button variant="outline" onClick={handleClose} disabled={loading}>
|
||||
{cancelLabel}
|
||||
</Button>
|
||||
<Button color="red" onClick={handleConfirm}>
|
||||
<Button
|
||||
color="red"
|
||||
onClick={handleConfirm}
|
||||
loading={loading}
|
||||
disabled={loading}
|
||||
loaderProps={{ type: 'dots' }}
|
||||
>
|
||||
{confirmLabel}
|
||||
</Button>
|
||||
</Group>
|
||||
|
|
|
|||
frontend/src/components/ErrorBoundary.jsx (new file, 18 lines)
|
|
@@ -0,0 +1,18 @@
import React from 'react';

class ErrorBoundary extends React.Component {
  state = { hasError: false };

  static getDerivedStateFromError(error) {
    return { hasError: true };
  }

  render() {
    if (this.state.hasError) {
      return <div>Something went wrong</div>;
    }
    return this.props.children;
  }
}

export default ErrorBoundary;
frontend/src/components/Field.jsx (new file, 47 lines)
|
|
@ -0,0 +1,47 @@
|
|||
import { NumberInput, Select, Switch, TextInput } from '@mantine/core';
|
||||
import React from 'react';
|
||||
|
||||
export const Field = ({ field, value, onChange }) => {
|
||||
const common = { label: field.label, description: field.help_text };
|
||||
const effective = value ?? field.default;
|
||||
switch (field.type) {
|
||||
case 'boolean':
|
||||
return (
|
||||
<Switch
|
||||
checked={!!effective}
|
||||
onChange={(e) => onChange(field.id, e.currentTarget.checked)}
|
||||
label={field.label}
|
||||
description={field.help_text}
|
||||
/>
|
||||
);
|
||||
case 'number':
|
||||
return (
|
||||
<NumberInput
|
||||
value={value ?? field.default ?? 0}
|
||||
onChange={(v) => onChange(field.id, v)}
|
||||
{...common}
|
||||
/>
|
||||
);
|
||||
case 'select':
|
||||
return (
|
||||
<Select
|
||||
value={(value ?? field.default ?? '') + ''}
|
||||
data={(field.options || []).map((o) => ({
|
||||
value: o.value + '',
|
||||
label: o.label,
|
||||
}))}
|
||||
onChange={(v) => onChange(field.id, v)}
|
||||
{...common}
|
||||
/>
|
||||
);
|
||||
case 'string':
|
||||
default:
|
||||
return (
|
||||
<TextInput
|
||||
value={value ?? field.default ?? ''}
|
||||
onChange={(e) => onChange(field.id, e.currentTarget.value)}
|
||||
{...common}
|
||||
/>
|
||||
);
|
||||
}
|
||||
};
|
||||
|
|
@ -1,5 +1,5 @@
|
|||
// frontend/src/components/FloatingVideo.js
|
||||
import React, { useEffect, useRef, useState } from 'react';
|
||||
import React, { useCallback, useEffect, useRef, useState } from 'react';
|
||||
import Draggable from 'react-draggable';
|
||||
import useVideoStore from '../store/useVideoStore';
|
||||
import mpegts from 'mpegts.js';
|
||||
|
|
@ -17,7 +17,94 @@ export default function FloatingVideo() {
|
|||
const [isLoading, setIsLoading] = useState(false);
|
||||
const [loadError, setLoadError] = useState(null);
|
||||
const [showOverlay, setShowOverlay] = useState(true);
|
||||
const [videoSize, setVideoSize] = useState({ width: 320, height: 180 });
|
||||
const [isResizing, setIsResizing] = useState(false);
|
||||
const resizeStateRef = useRef(null);
|
||||
const overlayTimeoutRef = useRef(null);
|
||||
const aspectRatioRef = useRef(320 / 180);
|
||||
const [dragPosition, setDragPosition] = useState(null);
|
||||
const dragPositionRef = useRef(null);
|
||||
const dragOffsetRef = useRef({ x: 0, y: 0 });
|
||||
const initialPositionRef = useRef(null);
|
||||
|
||||
const MIN_WIDTH = 220;
|
||||
const MIN_HEIGHT = 124;
|
||||
const VISIBLE_MARGIN = 48; // keep part of the window visible when dragging
|
||||
const HEADER_HEIGHT = 38; // height of the close button header area
|
||||
const ERROR_HEIGHT = 45; // approximate height of error message area when displayed
|
||||
const HANDLE_SIZE = 18;
|
||||
const HANDLE_OFFSET = 0;
|
||||
const resizeHandleBaseStyle = {
|
||||
position: 'absolute',
|
||||
width: HANDLE_SIZE,
|
||||
height: HANDLE_SIZE,
|
||||
backgroundColor: 'transparent',
|
||||
borderRadius: 6,
|
||||
zIndex: 8,
|
||||
touchAction: 'none',
|
||||
};
|
||||
const resizeHandles = [
|
||||
{
|
||||
id: 'bottom-right',
|
||||
cursor: 'nwse-resize',
|
||||
xDir: 1,
|
||||
yDir: 1,
|
||||
isLeft: false,
|
||||
isTop: false,
|
||||
style: {
|
||||
bottom: HANDLE_OFFSET,
|
||||
right: HANDLE_OFFSET,
|
||||
borderBottom: '2px solid rgba(255, 255, 255, 0.9)',
|
||||
borderRight: '2px solid rgba(255, 255, 255, 0.9)',
|
||||
borderRadius: '0 0 6px 0',
|
||||
},
|
||||
},
|
||||
{
|
||||
id: 'bottom-left',
|
||||
cursor: 'nesw-resize',
|
||||
xDir: -1,
|
||||
yDir: 1,
|
||||
isLeft: true,
|
||||
isTop: false,
|
||||
style: {
|
||||
bottom: HANDLE_OFFSET,
|
||||
left: HANDLE_OFFSET,
|
||||
borderBottom: '2px solid rgba(255, 255, 255, 0.9)',
|
||||
borderLeft: '2px solid rgba(255, 255, 255, 0.9)',
|
||||
borderRadius: '0 0 0 6px',
|
||||
},
|
||||
},
|
||||
{
|
||||
id: 'top-right',
|
||||
cursor: 'nesw-resize',
|
||||
xDir: 1,
|
||||
yDir: -1,
|
||||
isLeft: false,
|
||||
isTop: true,
|
||||
style: {
|
||||
top: HANDLE_OFFSET,
|
||||
right: HANDLE_OFFSET,
|
||||
borderTop: '2px solid rgba(255, 255, 255, 0.9)',
|
||||
borderRight: '2px solid rgba(255, 255, 255, 0.9)',
|
||||
borderRadius: '0 6px 0 0',
|
||||
},
|
||||
},
|
||||
{
|
||||
id: 'top-left',
|
||||
cursor: 'nwse-resize',
|
||||
xDir: -1,
|
||||
yDir: -1,
|
||||
isLeft: true,
|
||||
isTop: true,
|
||||
style: {
|
||||
top: HANDLE_OFFSET,
|
||||
left: HANDLE_OFFSET,
|
||||
borderTop: '2px solid rgba(255, 255, 255, 0.9)',
|
||||
borderLeft: '2px solid rgba(255, 255, 255, 0.9)',
|
||||
borderRadius: '6px 0 0 0',
|
||||
},
|
||||
},
|
||||
];
|
||||
|
||||
// Safely destroy the mpegts player to prevent errors
|
||||
const safeDestroyPlayer = () => {
|
||||
|
|
@ -315,24 +402,319 @@ export default function FloatingVideo() {
|
|||
}, 50);
|
||||
};
|
||||
|
||||
const clampToVisible = useCallback(
|
||||
(x, y) => {
|
||||
if (typeof window === 'undefined') return { x, y };
|
||||
|
||||
const totalHeight = videoSize.height + HEADER_HEIGHT + ERROR_HEIGHT;
|
||||
const minX = -(videoSize.width - VISIBLE_MARGIN);
|
||||
const minY = -(totalHeight - VISIBLE_MARGIN);
|
||||
const maxX = window.innerWidth - videoSize.width;
|
||||
const maxY = window.innerHeight - totalHeight;
|
||||
|
||||
return {
|
||||
x: Math.min(Math.max(x, minX), maxX),
|
||||
y: Math.min(Math.max(y, minY), maxY),
|
||||
};
|
||||
},
|
||||
[
|
||||
VISIBLE_MARGIN,
|
||||
HEADER_HEIGHT,
|
||||
ERROR_HEIGHT,
|
||||
videoSize.height,
|
||||
videoSize.width,
|
||||
]
|
||||
);
|
||||
|
||||
const clampToVisibleWithSize = useCallback(
|
||||
(x, y, width, height) => {
|
||||
if (typeof window === 'undefined') return { x, y };
|
||||
|
||||
const totalHeight = height + HEADER_HEIGHT + ERROR_HEIGHT;
|
||||
const minX = -(width - VISIBLE_MARGIN);
|
||||
const minY = -(totalHeight - VISIBLE_MARGIN);
|
||||
const maxX = window.innerWidth - width;
|
||||
const maxY = window.innerHeight - totalHeight;
|
||||
|
||||
return {
|
||||
x: Math.min(Math.max(x, minX), maxX),
|
||||
y: Math.min(Math.max(y, minY), maxY),
|
||||
};
|
||||
},
|
||||
[VISIBLE_MARGIN, HEADER_HEIGHT, ERROR_HEIGHT]
|
||||
);
|
||||
|
||||
const handleResizeMove = useCallback(
|
||||
(event) => {
|
||||
if (!resizeStateRef.current) return;
|
||||
|
||||
const clientX =
|
||||
event.touches && event.touches.length
|
||||
? event.touches[0].clientX
|
||||
: event.clientX;
|
||||
const clientY =
|
||||
event.touches && event.touches.length
|
||||
? event.touches[0].clientY
|
||||
: event.clientY;
|
||||
|
||||
const {
|
||||
startX,
|
||||
startY,
|
||||
startWidth,
|
||||
startHeight,
|
||||
startPos,
|
||||
handle,
|
||||
aspectRatio,
|
||||
} = resizeStateRef.current;
|
||||
const deltaX = clientX - startX;
|
||||
const deltaY = clientY - startY;
|
||||
const widthDelta = deltaX * handle.xDir;
|
||||
const heightDelta = deltaY * handle.yDir;
|
||||
const ratio = aspectRatio || aspectRatioRef.current;
|
||||
|
||||
// Derive width/height while keeping the original aspect ratio
|
||||
let nextWidth = startWidth + widthDelta;
|
||||
let nextHeight = nextWidth / ratio;
|
||||
|
||||
// Allow vertical-driven resize if the user drags mostly vertically
|
||||
if (Math.abs(deltaY) > Math.abs(deltaX)) {
|
||||
nextHeight = startHeight + heightDelta;
|
||||
nextWidth = nextHeight * ratio;
|
||||
}
|
||||
|
||||
// Respect minimums while keeping the ratio
|
||||
if (nextWidth < MIN_WIDTH) {
|
||||
nextWidth = MIN_WIDTH;
|
||||
nextHeight = nextWidth / ratio;
|
||||
}
|
||||
|
||||
if (nextHeight < MIN_HEIGHT) {
|
||||
nextHeight = MIN_HEIGHT;
|
||||
nextWidth = nextHeight * ratio;
|
||||
}
|
||||
|
||||
// Keep within viewport with a margin based on current position
|
||||
const posX = startPos?.x ?? 0;
|
||||
const posY = startPos?.y ?? 0;
|
||||
const margin = VISIBLE_MARGIN;
|
||||
let maxWidth = null;
|
||||
let maxHeight = null;
|
||||
|
||||
if (!handle.isLeft) {
|
||||
maxWidth = Math.max(MIN_WIDTH, window.innerWidth - posX - margin);
|
||||
}
|
||||
|
||||
if (!handle.isTop) {
|
||||
maxHeight = Math.max(MIN_HEIGHT, window.innerHeight - posY - margin);
|
||||
}
|
||||
|
||||
if (maxWidth != null && nextWidth > maxWidth) {
|
||||
nextWidth = maxWidth;
|
||||
nextHeight = nextWidth / ratio;
|
||||
}
|
||||
|
||||
if (maxHeight != null && nextHeight > maxHeight) {
|
||||
nextHeight = maxHeight;
|
||||
nextWidth = nextHeight * ratio;
|
||||
}
|
||||
|
||||
// Final pass to honor both bounds while keeping the ratio
|
||||
if (maxWidth != null && nextWidth > maxWidth) {
|
||||
nextWidth = maxWidth;
|
||||
nextHeight = nextWidth / ratio;
|
||||
}
|
||||
|
||||
setVideoSize({
|
||||
width: Math.round(nextWidth),
|
||||
height: Math.round(nextHeight),
|
||||
});
|
||||
|
||||
if (handle.isLeft || handle.isTop) {
|
||||
let nextX = posX;
|
||||
let nextY = posY;
|
||||
|
||||
if (handle.isLeft) {
|
||||
nextX = posX + (startWidth - nextWidth);
|
||||
}
|
||||
|
||||
if (handle.isTop) {
|
||||
nextY = posY + (startHeight - nextHeight);
|
||||
}
|
||||
|
||||
const clamped = clampToVisibleWithSize(
|
||||
nextX,
|
||||
nextY,
|
||||
nextWidth,
|
||||
nextHeight
|
||||
);
|
||||
|
||||
if (handle.isLeft) {
|
||||
nextX = clamped.x;
|
||||
}
|
||||
|
||||
if (handle.isTop) {
|
||||
nextY = clamped.y;
|
||||
}
|
||||
|
||||
const nextPos = { x: nextX, y: nextY };
|
||||
setDragPosition(nextPos);
|
||||
dragPositionRef.current = nextPos;
|
||||
}
|
||||
},
|
||||
[MIN_HEIGHT, MIN_WIDTH, VISIBLE_MARGIN, clampToVisibleWithSize]
|
||||
);
|
||||
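The resize handler above is mostly geometry: grow or shrink along the dominant drag axis, keep the original aspect ratio, and clamp to minimum sizes. The same math, isolated as a small Python function for clarity; the constants mirror the component's MIN_WIDTH and MIN_HEIGHT, and the separate viewport-clamping step is omitted.

def resize_keep_aspect(start_w, start_h, dx, dy, x_dir, y_dir,
                       min_w=220, min_h=124):
    """Return the new (width, height) for a drag of (dx, dy) on a given corner handle."""
    ratio = start_w / start_h

    # Width-driven by default; switch to height-driven if the drag is mostly vertical.
    if abs(dy) > abs(dx):
        new_h = start_h + dy * y_dir
        new_w = new_h * ratio
    else:
        new_w = start_w + dx * x_dir
        new_h = new_w / ratio

    # Respect minimums while keeping the ratio.
    if new_w < min_w:
        new_w = min_w
        new_h = new_w / ratio
    if new_h < min_h:
        new_h = min_h
        new_w = new_h * ratio
    return round(new_w), round(new_h)

# Dragging the bottom-right handle (x_dir = y_dir = 1) 40px to the right keeps 16:9:
print(resize_keep_aspect(320, 180, 40, 5, 1, 1))   # (360, 202)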
|
||||
const endResize = useCallback(() => {
|
||||
setIsResizing(false);
|
||||
resizeStateRef.current = null;
|
||||
window.removeEventListener('mousemove', handleResizeMove);
|
||||
window.removeEventListener('mouseup', endResize);
|
||||
window.removeEventListener('touchmove', handleResizeMove);
|
||||
window.removeEventListener('touchend', endResize);
|
||||
}, [handleResizeMove]);
|
||||
|
||||
const startResize = (event, handle) => {
|
||||
event.stopPropagation();
|
||||
event.preventDefault();
|
||||
|
||||
const clientX =
|
||||
event.touches && event.touches.length
|
||||
? event.touches[0].clientX
|
||||
: event.clientX;
|
||||
const clientY =
|
||||
event.touches && event.touches.length
|
||||
? event.touches[0].clientY
|
||||
: event.clientY;
|
||||
|
||||
const aspectRatio =
|
||||
videoSize.height > 0
|
||||
? videoSize.width / videoSize.height
|
||||
: aspectRatioRef.current;
|
||||
aspectRatioRef.current = aspectRatio;
|
||||
const startPos = dragPositionRef.current ||
|
||||
initialPositionRef.current || { x: 0, y: 0 };
|
||||
|
||||
resizeStateRef.current = {
|
||||
startX: clientX,
|
||||
startY: clientY,
|
||||
startWidth: videoSize.width,
|
||||
startHeight: videoSize.height,
|
||||
aspectRatio,
|
||||
startPos,
|
||||
handle,
|
||||
};
|
||||
|
||||
setIsResizing(true);
|
||||
|
||||
window.addEventListener('mousemove', handleResizeMove);
|
||||
window.addEventListener('mouseup', endResize);
|
||||
window.addEventListener('touchmove', handleResizeMove);
|
||||
window.addEventListener('touchend', endResize);
|
||||
};
|
||||
|
||||
useEffect(() => {
|
||||
return () => {
|
||||
endResize();
|
||||
};
|
||||
}, [endResize]);
|
||||
|
||||
useEffect(() => {
|
||||
dragPositionRef.current = dragPosition;
|
||||
}, [dragPosition]);
|
||||
|
||||
// Initialize the floating window near bottom-right once
|
||||
useEffect(() => {
|
||||
if (initialPositionRef.current || typeof window === 'undefined') return;
|
||||
|
||||
const totalHeight = videoSize.height + HEADER_HEIGHT + ERROR_HEIGHT;
|
||||
const initialX = Math.max(10, window.innerWidth - videoSize.width - 20);
|
||||
const initialY = Math.max(10, window.innerHeight - totalHeight - 20);
|
||||
const pos = clampToVisible(initialX, initialY);
|
||||
|
||||
initialPositionRef.current = pos;
|
||||
setDragPosition(pos);
|
||||
dragPositionRef.current = pos;
|
||||
}, [
|
||||
clampToVisible,
|
||||
videoSize.height,
|
||||
videoSize.width,
|
||||
HEADER_HEIGHT,
|
||||
ERROR_HEIGHT,
|
||||
]);
|
||||
|
||||
const handleDragStart = useCallback(
|
||||
(event, data) => {
|
||||
const clientX = event.touches?.[0]?.clientX ?? event.clientX;
|
||||
const clientY = event.touches?.[0]?.clientY ?? event.clientY;
|
||||
const rect = videoContainerRef.current?.getBoundingClientRect();
|
||||
|
||||
if (clientX != null && clientY != null && rect) {
|
||||
dragOffsetRef.current = {
|
||||
x: clientX - rect.left,
|
||||
y: clientY - rect.top,
|
||||
};
|
||||
} else {
|
||||
dragOffsetRef.current = { x: 0, y: 0 };
|
||||
}
|
||||
|
||||
const clamped = clampToVisible(data?.x ?? 0, data?.y ?? 0);
|
||||
setDragPosition(clamped);
|
||||
dragPositionRef.current = clamped;
|
||||
},
|
||||
[clampToVisible]
|
||||
);
|
||||
|
||||
const handleDrag = useCallback(
|
||||
(event) => {
|
||||
const clientX = event.touches?.[0]?.clientX ?? event.clientX;
|
||||
const clientY = event.touches?.[0]?.clientY ?? event.clientY;
|
||||
if (clientX == null || clientY == null) return;
|
||||
|
||||
const nextX = clientX - (dragOffsetRef.current?.x ?? 0);
|
||||
const nextY = clientY - (dragOffsetRef.current?.y ?? 0);
|
||||
const clamped = clampToVisible(nextX, nextY);
|
||||
setDragPosition(clamped);
|
||||
dragPositionRef.current = clamped;
|
||||
},
|
||||
[clampToVisible]
|
||||
);
|
||||
|
||||
const handleDragStop = useCallback(
|
||||
(_, data) => {
|
||||
const clamped = clampToVisible(data?.x ?? 0, data?.y ?? 0);
|
||||
setDragPosition(clamped);
|
||||
dragPositionRef.current = clamped;
|
||||
},
|
||||
[clampToVisible]
|
||||
);
|
||||
|
||||
// If the floating video is hidden or no URL is selected, do not render
|
||||
if (!isVisible || !streamUrl) {
|
||||
return null;
|
||||
}
|
||||
|
||||
return (
|
||||
<Draggable nodeRef={videoContainerRef}>
|
||||
<Draggable
|
||||
nodeRef={videoContainerRef}
|
||||
cancel=".floating-video-no-drag"
|
||||
disabled={isResizing}
|
||||
position={dragPosition || undefined}
|
||||
defaultPosition={initialPositionRef.current || { x: 0, y: 0 }}
|
||||
onStart={handleDragStart}
|
||||
onDrag={handleDrag}
|
||||
onStop={handleDragStop}
|
||||
>
|
||||
<div
|
||||
ref={videoContainerRef}
|
||||
style={{
|
||||
position: 'fixed',
|
||||
bottom: '20px',
|
||||
right: '20px',
|
||||
width: '320px',
|
||||
top: 0,
|
||||
left: 0,
|
||||
width: `${videoSize.width}px`,
|
||||
zIndex: 9999,
|
||||
backgroundColor: '#333',
|
||||
borderRadius: '8px',
|
||||
overflow: 'hidden',
|
||||
overflow: 'visible',
|
||||
boxShadow: '0 2px 10px rgba(0,0,0,0.7)',
|
||||
}}
|
||||
>
|
||||
|
|
@ -378,10 +760,12 @@ export default function FloatingVideo() {
|
|||
<video
|
||||
ref={videoRef}
|
||||
controls
|
||||
className="floating-video-no-drag"
|
||||
style={{
|
||||
width: '100%',
|
||||
height: '180px',
|
||||
height: `${videoSize.height}px`,
|
||||
backgroundColor: '#000',
|
||||
borderRadius: '0 0 8px 8px',
|
||||
// Better controls styling for VOD
|
||||
...(contentType === 'vod' && {
|
||||
controlsList: 'nodownload',
|
||||
|
|
@ -468,6 +852,21 @@ export default function FloatingVideo() {
|
|||
</Text>
|
||||
</Box>
|
||||
)}
|
||||
|
||||
{/* Resize handles */}
|
||||
{resizeHandles.map((handle) => (
|
||||
<Box
|
||||
key={handle.id}
|
||||
className="floating-video-no-drag"
|
||||
onMouseDown={(event) => startResize(event, handle)}
|
||||
onTouchStart={(event) => startResize(event, handle)}
|
||||
style={{
|
||||
...resizeHandleBaseStyle,
|
||||
...handle.style,
|
||||
cursor: handle.cursor,
|
||||
}}
|
||||
/>
|
||||
))}
|
||||
</div>
|
||||
</Draggable>
|
||||
);
|
||||
|
|
|
|||
frontend/src/components/GuideRow.jsx (new file, 206 lines)
|
|
@ -0,0 +1,206 @@
|
|||
import React from "react";
|
||||
import {
|
||||
CHANNEL_WIDTH,
|
||||
EXPANDED_PROGRAM_HEIGHT,
|
||||
HOUR_WIDTH,
|
||||
PROGRAM_HEIGHT,
|
||||
} from '../pages/guideUtils.js';
|
||||
import {Box, Flex, Text} from "@mantine/core";
|
||||
import {Play} from "lucide-react";
|
||||
import logo from "../images/logo.png";
|
||||
|
||||
const GuideRow = React.memo(({ index, style, data }) => {
|
||||
const {
|
||||
filteredChannels,
|
||||
programsByChannelId,
|
||||
expandedProgramId,
|
||||
rowHeights,
|
||||
logos,
|
||||
hoveredChannelId,
|
||||
setHoveredChannelId,
|
||||
renderProgram,
|
||||
handleLogoClick,
|
||||
contentWidth,
|
||||
} = data;
|
||||
|
||||
const channel = filteredChannels[index];
|
||||
if (!channel) {
|
||||
return null;
|
||||
}
|
||||
|
||||
const channelPrograms = programsByChannelId.get(channel.id) || [];
|
||||
const rowHeight =
|
||||
rowHeights[index] ??
|
||||
(channelPrograms.some((program) => program.id === expandedProgramId)
|
||||
    ? EXPANDED_PROGRAM_HEIGHT
    : PROGRAM_HEIGHT);

  const PlaceholderProgram = () => {
    return <>
      {Array.from({length: Math.ceil(24 / 2)}).map(
        (_, placeholderIndex) => (
          <Box
            key={`placeholder-${channel.id}-${placeholderIndex}`}
            style={{
              alignItems: 'center',
              justifyContent: 'center',
            }}
            pos='absolute'
            left={placeholderIndex * (HOUR_WIDTH * 2)}
            top={0}
            w={HOUR_WIDTH * 2}
            h={rowHeight - 4}
            bd={'1px dashed #2D3748'}
            bdrs={4}
            display={'flex'}
            c='#4A5568'
          >
            <Text size="sm">No program data</Text>
          </Box>
        )
      )}
    </>;
  }

  return (
    <div
      data-testid="guide-row"
      style={{ ...style, width: contentWidth, height: rowHeight }}
    >
      <Box
        style={{
          borderBottom: '0px solid #27272A',
          transition: 'height 0.2s ease',
          overflow: 'visible',
        }}
        display={'flex'}
        h={'100%'}
        pos='relative'
      >
        <Box
          className="channel-logo"
          style={{
            flexShrink: 0,
            alignItems: 'center',
            justifyContent: 'center',
            backgroundColor: '#18181B',
            borderRight: '1px solid #27272A',
            borderBottom: '1px solid #27272A',
            boxShadow: '2px 0 5px rgba(0,0,0,0.2)',
            zIndex: 30,
            transition: 'height 0.2s ease',
            cursor: 'pointer',
          }}
          w={CHANNEL_WIDTH}
          miw={CHANNEL_WIDTH}
          display={'flex'}
          left={0}
          h={'100%'}
          pos='relative'
          onClick={(event) => handleLogoClick(channel, event)}
          onMouseEnter={() => setHoveredChannelId(channel.id)}
          onMouseLeave={() => setHoveredChannelId(null)}
        >
          {hoveredChannelId === channel.id && (
            <Flex
              align="center"
              justify="center"
              style={{
                backgroundColor: 'rgba(0, 0, 0, 0.7)',
                zIndex: 10,
                animation: 'fadeIn 0.2s',
              }}
              pos='absolute'
              top={0}
              left={0}
              right={0}
              bottom={0}
              w={'100%'}
              h={'100%'}
            >
              <Play size={32} color="#fff" fill="#fff" />
            </Flex>
          )}

          <Flex
            direction="column"
            align="center"
            justify="space-between"
            style={{
              boxSizing: 'border-box',
              zIndex: 5,
            }}
            w={'100%'}
            h={'100%'}
            p={'4px'}
            pos='relative'
          >
            <Box
              style={{
                alignItems: 'center',
                justifyContent: 'center',
                overflow: 'hidden',
              }}
              w={'100%'}
              h={`${rowHeight - 32}px`}
              display={'flex'}
              p={'4px'}
              mb={'4px'}
            >
              <img
                src={logos[channel.logo_id]?.cache_url || logo}
                alt={channel.name}
                style={{
                  maxWidth: '100%',
                  maxHeight: '100%',
                  objectFit: 'contain',
                }}
              />
            </Box>

            <Text
              size="sm"
              weight={600}
              style={{
                transform: 'translateX(-50%)',
                backgroundColor: '#18181B',
                alignItems: 'center',
                justifyContent: 'center',
              }}
              pos='absolute'
              bottom={4}
              left={'50%'}
              p={'2px 8px'}
              bdrs={4}
              fz={'0.85em'}
              bd={'1px solid #27272A'}
              h={'24px'}
              display={'flex'}
              miw={'36px'}
            >
              {channel.channel_number || '-'}
            </Text>
          </Flex>
        </Box>

        <Box
          style={{
            transition: 'height 0.2s ease',
          }}
          flex={1}
          pos='relative'
          h={'100%'}
          pl={0}
        >
          {channelPrograms.length > 0 ? (
            channelPrograms.map((program) =>
              renderProgram(program, undefined, channel)
            )
          ) : <PlaceholderProgram />}
        </Box>
      </Box>
    </div>
  );
});

export default GuideRow;
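
A quick note on the placeholder row above: when a channel has no guide data, it is tiled with Math.ceil(24 / 2) = 12 two-hour blocks. A minimal sketch of the same offset math, with HOUR_WIDTH assumed to be the per-hour pixel width from guideUtils (the value 300 is an illustration, not the project's constant):

// Illustrative only; HOUR_WIDTH = 300 is an assumption, not the project's value.
const HOUR_WIDTH = 300;
const placeholders = Array.from({ length: Math.ceil(24 / 2) }, (_, i) => ({
  left: i * (HOUR_WIDTH * 2),   // 0, 600, 1200, ...
  width: HOUR_WIDTH * 2,        // each block spans two hours
}));
// The last block ends at 12 * 600 = 7200 px, i.e. exactly 24 * HOUR_WIDTH.
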

frontend/src/components/HourTimeline.jsx (new file, 105 lines)
@@ -0,0 +1,105 @@
import React from 'react';
import { Box, Text } from '@mantine/core';
import { format } from '../utils/dateTimeUtils.js';
import { HOUR_WIDTH } from '../pages/guideUtils.js';

const HourBlock = React.memo(({ hourData, timeFormat, formatDayLabel, handleTimeClick }) => {
  const { time, isNewDay } = hourData;

  return (
    <Box
      key={format(time)}
      style={{
        borderRight: '1px solid #8DAFAA',
        cursor: 'pointer',
        borderLeft: isNewDay ? '2px solid #3BA882' : 'none',
        backgroundColor: isNewDay ? '#1E2A27' : '#1B2421',
      }}
      w={HOUR_WIDTH}
      h={'40px'}
      pos='relative'
      c='#a0aec0'
      onClick={(e) => handleTimeClick(time, e)}
    >
      <Text
        size="sm"
        style={{ transform: 'none' }}
        pos='absolute'
        top={8}
        left={4}
        bdrs={2}
        lh={1.2}
        ta='left'
      >
        <Text
          span
          size="xs"
          display={'block'}
          opacity={0.7}
          fw={isNewDay ? 600 : 400}
          c={isNewDay ? '#3BA882' : undefined}
        >
          {formatDayLabel(time)}
        </Text>
        {format(time, timeFormat)}
        <Text span size="xs" ml={1} opacity={0.7} />
      </Text>

      <Box
        style={{
          backgroundColor: '#27272A',
          zIndex: 10,
        }}
        pos='absolute'
        left={0}
        top={0}
        bottom={0}
        w={'1px'}
      />

      <Box
        style={{ justifyContent: 'space-between' }}
        pos='absolute'
        bottom={0}
        w={'100%'}
        display={'flex'}
        p={'0 1px'}
      >
        {[15, 30, 45].map((minute) => (
          <Box
            key={minute}
            style={{ backgroundColor: '#718096' }}
            w={'1px'}
            h={'8px'}
            pos='absolute'
            bottom={0}
            left={`${(minute / 60) * 100}%`}
          />
        ))}
      </Box>
    </Box>
  );
});

const HourTimeline = React.memo(({
  hourTimeline,
  timeFormat,
  formatDayLabel,
  handleTimeClick
}) => {
  return (
    <>
      {hourTimeline.map((hourData) => (
        <HourBlock
          key={format(hourData.time)}
          hourData={hourData}
          timeFormat={timeFormat}
          formatDayLabel={formatDayLabel}
          handleTimeClick={handleTimeClick}
        />
      ))}
    </>
  );
});

export default HourTimeline;
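
HourTimeline only maps over its hourTimeline prop; each entry is assumed to carry a time plus an isNewDay flag, which drives the green day separator and the day label styling. A rough sketch of how such an array could be built (dayjs here is an assumption, not the project's dateTimeUtils helper):

import dayjs from 'dayjs';

// Hypothetical helper: 24 hourly entries starting from `start`,
// flagging the first hour of each new calendar day.
const buildHourTimeline = (start, hours = 24) =>
  Array.from({ length: hours }, (_, i) => {
    const time = dayjs(start).startOf('hour').add(i, 'hour');
    return { time: time.toDate(), isNewDay: time.hour() === 0 };
  });

// <HourTimeline hourTimeline={buildHourTimeline(Date.now())} timeFormat="HH:mm" ... />
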

@@ -1,4 +1,4 @@
-import React, { useState, useEffect, useRef } from 'react';
+import React, { useState, useEffect, useRef } from 'react';
 import { Skeleton } from '@mantine/core';
 import useLogosStore from '../store/logos';
 import logo from '../images/logo.png'; // Default logo

@@ -16,15 +16,16 @@ const LazyLogo = ({
 }) => {
   const [isLoading, setIsLoading] = useState(false);
   const [hasError, setHasError] = useState(false);
-  const fetchAttempted = useRef(new Set()); // Track which IDs we've already tried to fetch
+  const fetchAttempted = useRef(new Set());
   const isMountedRef = useRef(true);

   const logos = useLogosStore((s) => s.logos);
   const fetchLogosByIds = useLogosStore((s) => s.fetchLogosByIds);
+  const allowLogoRendering = useLogosStore((s) => s.allowLogoRendering);

   // Determine the logo source
   const logoData = logoId && logos[logoId];
-  const logoSrc = logoData?.cache_url || fallbackSrc; // Only use cache URL if we have logo data
+  const logoSrc = logoData?.cache_url || fallbackSrc;

   // Cleanup on unmount
   useEffect(() => {

@@ -34,6 +35,9 @@
   }, []);

   useEffect(() => {
+    // Don't start fetching until logo rendering is allowed
+    if (!allowLogoRendering) return;
+
     // If we have a logoId but no logo data, add it to the batch request queue
     if (
       logoId &&

@@ -44,7 +48,7 @@
       isMountedRef.current
     ) {
       setIsLoading(true);
-      fetchAttempted.current.add(logoId); // Mark this ID as attempted
+      fetchAttempted.current.add(logoId);
       logoRequestQueue.add(logoId);

       // Clear existing timer and set new one to batch requests

@@ -82,7 +86,7 @@
       setIsLoading(false);
     }
     // eslint-disable-next-line react-hooks/exhaustive-deps
-  }, [logoId, fetchLogosByIds, logoData]); // Include logoData to detect when it becomes available
+  }, [logoId, fetchLogosByIds, logoData, allowLogoRendering]);

   // Reset error state when logoId changes
   useEffect(() => {

@@ -91,8 +95,10 @@
     }
   }, [logoId]);

-  // Show skeleton while loading
-  if (isLoading && !logoData) {
+  // Show skeleton if:
+  // 1. Logo rendering is not allowed yet, OR
+  // 2. We don't have logo data yet (regardless of loading state)
+  if (logoId && (!allowLogoRendering || !logoData)) {
     return (
       <Skeleton
         height={style.maxHeight || 18}

frontend/src/components/RecordingSynopsis.jsx (new file, 26 lines)
@@ -0,0 +1,26 @@
import { Text, } from '@mantine/core';

// Short preview that triggers the details modal when clicked
const RecordingSynopsis = ({ description, onOpen }) => {
  const truncated = description?.length > 140;
  const preview = truncated
    ? `${description.slice(0, 140).trim()}...`
    : description;

  if (!description) return null;

  return (
    <Text
      size="xs"
      c="dimmed"
      lineClamp={2}
      title={description}
      onClick={() => onOpen?.()}
      style={{ cursor: 'pointer' }}
    >
      {preview}
    </Text>
  );
};

export default RecordingSynopsis;
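
RecordingSynopsis is meant to be dropped into a recording card or list row: it clamps the text to two lines, truncates anything over 140 characters with an ellipsis, and defers the click to whatever opens the details view. A minimal usage sketch (the parent component and its openDetails handler are assumptions):

const RecordingRow = ({ recording, openDetails }) => (
  <RecordingSynopsis
    description={recording.custom_properties?.description}
    onOpen={() => openDetails(recording)}
  />
);
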

@@ -928,7 +928,8 @@ const SeriesModal = ({ series, opened, onClose }) => {
   src={trailerUrl}
   title="YouTube Trailer"
   frameBorder="0"
-  allow="accelerometer; autoplay; clipboard-write; encrypted-media; gyroscope; picture-in-picture"
+  allow="accelerometer; autoplay; clipboard-write; encrypted-media; gyroscope; picture-in-picture; web-share"
+  referrerPolicy="strict-origin-when-cross-origin"
   allowFullScreen
   style={{
     position: 'absolute',

@@ -188,8 +188,8 @@ const Sidebar = ({ collapsed, toggleDrawer, drawerWidth, miniDrawerWidth }) => {
     }
   };

-  const onLogout = () => {
-    logout();
+  const onLogout = async () => {
+    await logout();
     window.location.reload();
   };

frontend/src/components/SystemEvents.jsx (new file, 333 lines)
@@ -0,0 +1,333 @@
import React, { useState, useEffect, useCallback } from 'react';
|
||||
import {
|
||||
ActionIcon,
|
||||
Box,
|
||||
Button,
|
||||
Card,
|
||||
Group,
|
||||
NumberInput,
|
||||
Pagination,
|
||||
Select,
|
||||
Stack,
|
||||
Text,
|
||||
Title,
|
||||
} from '@mantine/core';
|
||||
import { useElementSize } from '@mantine/hooks';
|
||||
import {
|
||||
ChevronDown,
|
||||
CirclePlay,
|
||||
Download,
|
||||
Gauge,
|
||||
HardDriveDownload,
|
||||
List,
|
||||
LogIn,
|
||||
LogOut,
|
||||
RefreshCw,
|
||||
Shield,
|
||||
ShieldAlert,
|
||||
SquareX,
|
||||
Timer,
|
||||
Users,
|
||||
Video,
|
||||
XCircle,
|
||||
} from 'lucide-react';
|
||||
import dayjs from 'dayjs';
|
||||
import API from '../api';
|
||||
import useLocalStorage from '../hooks/useLocalStorage';
|
||||
|
||||
const SystemEvents = () => {
|
||||
const [events, setEvents] = useState([]);
|
||||
const [totalEvents, setTotalEvents] = useState(0);
|
||||
const [isExpanded, setIsExpanded] = useState(false);
|
||||
const { ref: cardRef, width: cardWidth } = useElementSize();
|
||||
const isNarrow = cardWidth < 650;
|
||||
const [isLoading, setIsLoading] = useState(false);
|
||||
const [dateFormatSetting] = useLocalStorage('date-format', 'mdy');
|
||||
const dateFormat = dateFormatSetting === 'mdy' ? 'MM/DD' : 'DD/MM';
|
||||
const [eventsRefreshInterval, setEventsRefreshInterval] = useLocalStorage(
|
||||
'events-refresh-interval',
|
||||
0
|
||||
);
|
||||
const [eventsLimit, setEventsLimit] = useLocalStorage('events-limit', 100);
|
||||
const [currentPage, setCurrentPage] = useState(1);
|
||||
|
||||
// Calculate offset based on current page and limit
|
||||
const offset = (currentPage - 1) * eventsLimit;
|
||||
const totalPages = Math.ceil(totalEvents / eventsLimit);
|
||||
|
||||
const fetchEvents = useCallback(async () => {
|
||||
try {
|
||||
setIsLoading(true);
|
||||
const response = await API.getSystemEvents(eventsLimit, offset);
|
||||
if (response && response.events) {
|
||||
setEvents(response.events);
|
||||
setTotalEvents(response.total || 0);
|
||||
}
|
||||
} catch (error) {
|
||||
console.error('Error fetching system events:', error);
|
||||
} finally {
|
||||
setIsLoading(false);
|
||||
}
|
||||
}, [eventsLimit, offset]);
|
||||
|
||||
// Fetch events on mount and when eventsRefreshInterval changes
|
||||
useEffect(() => {
|
||||
fetchEvents();
|
||||
|
||||
// Set up polling if interval is set and events section is expanded
|
||||
if (eventsRefreshInterval > 0 && isExpanded) {
|
||||
const interval = setInterval(fetchEvents, eventsRefreshInterval * 1000);
|
||||
return () => clearInterval(interval);
|
||||
}
|
||||
}, [fetchEvents, eventsRefreshInterval, isExpanded]);
|
||||
|
||||
// Reset to first page when limit changes
|
||||
useEffect(() => {
|
||||
setCurrentPage(1);
|
||||
}, [eventsLimit]);
|
||||
|
||||
const getEventIcon = (eventType) => {
|
||||
switch (eventType) {
|
||||
case 'channel_start':
|
||||
return <CirclePlay size={16} />;
|
||||
case 'channel_stop':
|
||||
return <SquareX size={16} />;
|
||||
case 'channel_reconnect':
|
||||
return <RefreshCw size={16} />;
|
||||
case 'channel_buffering':
|
||||
return <Timer size={16} />;
|
||||
case 'channel_failover':
|
||||
return <HardDriveDownload size={16} />;
|
||||
case 'client_connect':
|
||||
return <Users size={16} />;
|
||||
case 'client_disconnect':
|
||||
return <Users size={16} />;
|
||||
case 'recording_start':
|
||||
return <Video size={16} />;
|
||||
case 'recording_end':
|
||||
return <Video size={16} />;
|
||||
case 'stream_switch':
|
||||
return <HardDriveDownload size={16} />;
|
||||
case 'm3u_refresh':
|
||||
return <RefreshCw size={16} />;
|
||||
case 'm3u_download':
|
||||
return <Download size={16} />;
|
||||
case 'epg_refresh':
|
||||
return <RefreshCw size={16} />;
|
||||
case 'epg_download':
|
||||
return <Download size={16} />;
|
||||
case 'login_success':
|
||||
return <LogIn size={16} />;
|
||||
case 'login_failed':
|
||||
return <ShieldAlert size={16} />;
|
||||
case 'logout':
|
||||
return <LogOut size={16} />;
|
||||
case 'm3u_blocked':
|
||||
return <XCircle size={16} />;
|
||||
case 'epg_blocked':
|
||||
return <XCircle size={16} />;
|
||||
default:
|
||||
return <Gauge size={16} />;
|
||||
}
|
||||
};
|
||||
|
||||
const getEventColor = (eventType) => {
|
||||
switch (eventType) {
|
||||
case 'channel_start':
|
||||
case 'client_connect':
|
||||
case 'recording_start':
|
||||
case 'login_success':
|
||||
return 'green';
|
||||
case 'channel_reconnect':
|
||||
return 'yellow';
|
||||
case 'channel_stop':
|
||||
case 'client_disconnect':
|
||||
case 'recording_end':
|
||||
case 'logout':
|
||||
return 'gray';
|
||||
case 'channel_buffering':
|
||||
return 'yellow';
|
||||
case 'channel_failover':
|
||||
case 'channel_error':
|
||||
return 'orange';
|
||||
case 'stream_switch':
|
||||
return 'blue';
|
||||
case 'm3u_refresh':
|
||||
case 'epg_refresh':
|
||||
return 'cyan';
|
||||
case 'm3u_download':
|
||||
case 'epg_download':
|
||||
return 'teal';
|
||||
case 'login_failed':
|
||||
case 'm3u_blocked':
|
||||
case 'epg_blocked':
|
||||
return 'red';
|
||||
default:
|
||||
return 'gray';
|
||||
}
|
||||
};
|
||||
|
||||
return (
|
||||
<Card
|
||||
ref={cardRef}
|
||||
shadow="sm"
|
||||
padding="sm"
|
||||
radius="md"
|
||||
withBorder
|
||||
style={{
|
||||
color: '#fff',
|
||||
backgroundColor: '#27272A',
|
||||
width: '100%',
|
||||
maxWidth: isExpanded ? '100%' : '800px',
|
||||
marginLeft: 'auto',
|
||||
marginRight: 'auto',
|
||||
transition: 'max-width 0.3s ease',
|
||||
}}
|
||||
>
|
||||
<Group justify="space-between" mb={isExpanded ? 'sm' : 0}>
|
||||
<Group gap="xs">
|
||||
<Gauge size={20} />
|
||||
<Title order={4}>System Events</Title>
|
||||
</Group>
|
||||
<Group gap="xs">
|
||||
{(isExpanded || !isNarrow) && (
|
||||
<>
|
||||
<NumberInput
|
||||
size="xs"
|
||||
label="Events Per Page"
|
||||
value={eventsLimit}
|
||||
onChange={(value) => setEventsLimit(value || 10)}
|
||||
min={10}
|
||||
max={1000}
|
||||
step={10}
|
||||
style={{ width: 130 }}
|
||||
/>
|
||||
<Select
|
||||
size="xs"
|
||||
label="Auto Refresh"
|
||||
value={eventsRefreshInterval.toString()}
|
||||
onChange={(value) => setEventsRefreshInterval(parseInt(value))}
|
||||
data={[
|
||||
{ value: '0', label: 'Manual' },
|
||||
{ value: '5', label: '5s' },
|
||||
{ value: '10', label: '10s' },
|
||||
{ value: '30', label: '30s' },
|
||||
{ value: '60', label: '1m' },
|
||||
]}
|
||||
style={{ width: 120 }}
|
||||
/>
|
||||
<Button
|
||||
size="xs"
|
||||
variant="subtle"
|
||||
onClick={fetchEvents}
|
||||
loading={isLoading}
|
||||
style={{ marginTop: 'auto' }}
|
||||
>
|
||||
Refresh
|
||||
</Button>
|
||||
</>
|
||||
)}
|
||||
<ActionIcon
|
||||
variant="subtle"
|
||||
onClick={() => setIsExpanded(!isExpanded)}
|
||||
>
|
||||
<ChevronDown
|
||||
size={18}
|
||||
style={{
|
||||
transform: isExpanded ? 'rotate(180deg)' : 'rotate(0deg)',
|
||||
transition: 'transform 0.2s',
|
||||
}}
|
||||
/>
|
||||
</ActionIcon>
|
||||
</Group>
|
||||
</Group>
|
||||
|
||||
{isExpanded && (
|
||||
<>
|
||||
{totalEvents > eventsLimit && (
|
||||
<Group justify="space-between" align="center" mt="sm" mb="xs">
|
||||
<Text size="xs" c="dimmed">
|
||||
Showing {offset + 1}-
|
||||
{Math.min(offset + eventsLimit, totalEvents)} of {totalEvents}
|
||||
</Text>
|
||||
<Pagination
|
||||
total={totalPages}
|
||||
value={currentPage}
|
||||
onChange={setCurrentPage}
|
||||
size="sm"
|
||||
/>
|
||||
</Group>
|
||||
)}
|
||||
<Stack
|
||||
gap="xs"
|
||||
mt="sm"
|
||||
style={{
|
||||
maxHeight: '60vh',
|
||||
overflowY: 'auto',
|
||||
}}
|
||||
>
|
||||
{events.length === 0 ? (
|
||||
<Text size="sm" c="dimmed" ta="center" py="xl">
|
||||
No events recorded yet
|
||||
</Text>
|
||||
) : (
|
||||
events.map((event) => (
|
||||
<Box
|
||||
key={event.id}
|
||||
p="xs"
|
||||
style={{
|
||||
backgroundColor: '#1A1B1E',
|
||||
borderRadius: '4px',
|
||||
borderLeft: `3px solid var(--mantine-color-${getEventColor(event.event_type)}-6)`,
|
||||
}}
|
||||
>
|
||||
<Group justify="space-between" wrap="nowrap">
|
||||
<Group gap="xs" style={{ flex: 1, minWidth: 0 }}>
|
||||
<Box c={`${getEventColor(event.event_type)}.6`}>
|
||||
{getEventIcon(event.event_type)}
|
||||
</Box>
|
||||
<Stack gap={2} style={{ flex: 1, minWidth: 0 }}>
|
||||
<Group gap="xs" wrap="nowrap">
|
||||
<Text size="sm" fw={500}>
|
||||
{event.event_type_display || event.event_type}
|
||||
</Text>
|
||||
{event.channel_name && (
|
||||
<Text
|
||||
size="sm"
|
||||
c="dimmed"
|
||||
truncate
|
||||
style={{ maxWidth: '300px' }}
|
||||
>
|
||||
{event.channel_name}
|
||||
</Text>
|
||||
)}
|
||||
</Group>
|
||||
{event.details &&
|
||||
Object.keys(event.details).length > 0 && (
|
||||
<Text size="xs" c="dimmed">
|
||||
{Object.entries(event.details)
|
||||
.filter(
|
||||
([key]) =>
|
||||
!['stream_url', 'new_url'].includes(key)
|
||||
)
|
||||
.map(([key, value]) => `${key}: ${value}`)
|
||||
.join(', ')}
|
||||
</Text>
|
||||
)}
|
||||
</Stack>
|
||||
</Group>
|
||||
<Text size="xs" c="dimmed" style={{ whiteSpace: 'nowrap' }}>
|
||||
{dayjs(event.timestamp).format(`${dateFormat} HH:mm:ss`)}
|
||||
</Text>
|
||||
</Group>
|
||||
</Box>
|
||||
))
|
||||
)}
|
||||
</Stack>
|
||||
</>
|
||||
)}
|
||||
</Card>
|
||||
);
|
||||
};
|
||||
|
||||
export default SystemEvents;
|
||||
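
SystemEvents (added above) pages through the event log with plain offset/limit arithmetic. A minimal sketch of the math it uses, with illustrative values:

// Offset/limit pagination as used by SystemEvents (values are illustrative).
const eventsLimit = 100;                           // "Events Per Page"
const currentPage = 3;                             // 1-based page from the Pagination control
const offset = (currentPage - 1) * eventsLimit;    // 200 -> skip the first two pages
const totalPages = Math.ceil(2350 / eventsLimit);  // 24 pages for 2350 events
// The fetch then becomes API.getSystemEvents(eventsLimit, offset).
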

@@ -694,7 +694,8 @@ const VODModal = ({ vod, opened, onClose }) => {
   src={trailerUrl}
   title="YouTube Trailer"
   frameBorder="0"
-  allow="accelerometer; autoplay; clipboard-write; encrypted-media; gyroscope; picture-in-picture"
+  allow="accelerometer; autoplay; clipboard-write; encrypted-media; gyroscope; picture-in-picture; web-share"
+  referrerPolicy="strict-origin-when-cross-origin"
   allowFullScreen
   style={{
     position: 'absolute',

frontend/src/components/backups/BackupManager.jsx (new file, 978 lines)
@@ -0,0 +1,978 @@
import { useEffect, useMemo, useState } from 'react';
|
||||
import {
|
||||
ActionIcon,
|
||||
Box,
|
||||
Button,
|
||||
FileInput,
|
||||
Flex,
|
||||
Group,
|
||||
Loader,
|
||||
Modal,
|
||||
NumberInput,
|
||||
Paper,
|
||||
Select,
|
||||
Stack,
|
||||
Switch,
|
||||
Text,
|
||||
TextInput,
|
||||
Tooltip,
|
||||
} from '@mantine/core';
|
||||
import {
|
||||
Download,
|
||||
RefreshCcw,
|
||||
RotateCcw,
|
||||
SquareMinus,
|
||||
SquarePlus,
|
||||
UploadCloud,
|
||||
} from 'lucide-react';
|
||||
import { notifications } from '@mantine/notifications';
|
||||
import dayjs from 'dayjs';
|
||||
|
||||
import API from '../../api';
|
||||
import ConfirmationDialog from '../ConfirmationDialog';
|
||||
import useLocalStorage from '../../hooks/useLocalStorage';
|
||||
import useWarningsStore from '../../store/warnings';
|
||||
import { CustomTable, useTable } from '../tables/CustomTable';
|
||||
|
||||
const RowActions = ({
|
||||
row,
|
||||
handleDownload,
|
||||
handleRestoreClick,
|
||||
handleDeleteClick,
|
||||
downloading,
|
||||
}) => {
|
||||
return (
|
||||
<Flex gap={4} wrap="nowrap">
|
||||
<Tooltip label="Download">
|
||||
<ActionIcon
|
||||
variant="transparent"
|
||||
size="sm"
|
||||
color="blue.5"
|
||||
onClick={() => handleDownload(row.original.name)}
|
||||
loading={downloading === row.original.name}
|
||||
disabled={downloading !== null}
|
||||
>
|
||||
<Download size={18} />
|
||||
</ActionIcon>
|
||||
</Tooltip>
|
||||
<Tooltip label="Restore">
|
||||
<ActionIcon
|
||||
variant="transparent"
|
||||
size="sm"
|
||||
color="yellow.5"
|
||||
onClick={() => handleRestoreClick(row.original)}
|
||||
>
|
||||
<RotateCcw size={18} />
|
||||
</ActionIcon>
|
||||
</Tooltip>
|
||||
<Tooltip label="Delete">
|
||||
<ActionIcon
|
||||
variant="transparent"
|
||||
size="sm"
|
||||
color="red.9"
|
||||
onClick={() => handleDeleteClick(row.original)}
|
||||
>
|
||||
<SquareMinus size={18} />
|
||||
</ActionIcon>
|
||||
</Tooltip>
|
||||
</Flex>
|
||||
);
|
||||
};
|
||||
|
||||
// Convert 24h time string to 12h format with period
|
||||
function to12Hour(time24) {
|
||||
if (!time24) return { time: '12:00', period: 'AM' };
|
||||
const [hours, minutes] = time24.split(':').map(Number);
|
||||
const period = hours >= 12 ? 'PM' : 'AM';
|
||||
const hours12 = hours % 12 || 12;
|
||||
return {
|
||||
time: `${hours12}:${String(minutes).padStart(2, '0')}`,
|
||||
period,
|
||||
};
|
||||
}
|
||||
|
||||
// Convert 12h time + period to 24h format
|
||||
function to24Hour(time12, period) {
|
||||
if (!time12) return '00:00';
|
||||
const [hours, minutes] = time12.split(':').map(Number);
|
||||
let hours24 = hours;
|
||||
if (period === 'PM' && hours !== 12) {
|
||||
hours24 = hours + 12;
|
||||
} else if (period === 'AM' && hours === 12) {
|
||||
hours24 = 0;
|
||||
}
|
||||
return `${String(hours24).padStart(2, '0')}:${String(minutes).padStart(2, '0')}`;
|
||||
}
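// The two helpers above are inverses on well-formed input. A quick round-trip
// check, using the functions exactly as written (illustrative only):
to12Hour('15:30');       // { time: '3:30', period: 'PM' }
to24Hour('3:30', 'PM');  // '15:30'
to12Hour('00:15');       // { time: '12:15', period: 'AM' }  (midnight maps to 12 AM)
to24Hour('12:15', 'AM'); // '00:15'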
|
||||
|
||||
// Get default timezone (same as Settings page)
|
||||
function getDefaultTimeZone() {
|
||||
try {
|
||||
return Intl.DateTimeFormat().resolvedOptions().timeZone || 'UTC';
|
||||
} catch {
|
||||
return 'UTC';
|
||||
}
|
||||
}
|
||||
|
||||
// Validate cron expression
|
||||
function validateCronExpression(expression) {
|
||||
if (!expression || expression.trim() === '') {
|
||||
return { valid: false, error: 'Cron expression is required' };
|
||||
}
|
||||
|
||||
const parts = expression.trim().split(/\s+/);
|
||||
if (parts.length !== 5) {
|
||||
return {
|
||||
valid: false,
|
||||
error:
|
||||
'Cron expression must have exactly 5 parts: minute hour day month weekday',
|
||||
};
|
||||
}
|
||||
|
||||
const [minute, hour, dayOfMonth, month, dayOfWeek] = parts;
|
||||
|
||||
// Validate each part (allowing *, */N steps, ranges, lists, steps)
|
||||
// Supports: *, */2, 5, 1-5, 1-5/2, 1,3,5, etc.
|
||||
const cronPartRegex =
|
||||
/^(\*\/\d+|\*|\d+(-\d+)?(\/\d+)?(,\d+(-\d+)?(\/\d+)?)*)$/;
|
||||
|
||||
if (!cronPartRegex.test(minute)) {
|
||||
return {
|
||||
valid: false,
|
||||
error: 'Invalid minute field (0-59, *, or cron syntax)',
|
||||
};
|
||||
}
|
||||
if (!cronPartRegex.test(hour)) {
|
||||
return {
|
||||
valid: false,
|
||||
error: 'Invalid hour field (0-23, *, or cron syntax)',
|
||||
};
|
||||
}
|
||||
if (!cronPartRegex.test(dayOfMonth)) {
|
||||
return {
|
||||
valid: false,
|
||||
error: 'Invalid day field (1-31, *, or cron syntax)',
|
||||
};
|
||||
}
|
||||
if (!cronPartRegex.test(month)) {
|
||||
return {
|
||||
valid: false,
|
||||
error: 'Invalid month field (1-12, *, or cron syntax)',
|
||||
};
|
||||
}
|
||||
if (!cronPartRegex.test(dayOfWeek)) {
|
||||
return {
|
||||
valid: false,
|
||||
error: 'Invalid weekday field (0-6, *, or cron syntax)',
|
||||
};
|
||||
}
|
||||
|
||||
// Additional range validation for numeric values
|
||||
const validateRange = (value, min, max, name) => {
|
||||
// Skip if it's * or contains special characters
|
||||
if (
|
||||
value === '*' ||
|
||||
value.includes('/') ||
|
||||
value.includes('-') ||
|
||||
value.includes(',')
|
||||
) {
|
||||
return null;
|
||||
}
|
||||
const num = parseInt(value, 10);
|
||||
if (isNaN(num) || num < min || num > max) {
|
||||
return `${name} must be between ${min} and ${max}`;
|
||||
}
|
||||
return null;
|
||||
};
|
||||
|
||||
const minuteError = validateRange(minute, 0, 59, 'Minute');
|
||||
if (minuteError) return { valid: false, error: minuteError };
|
||||
|
||||
const hourError = validateRange(hour, 0, 23, 'Hour');
|
||||
if (hourError) return { valid: false, error: hourError };
|
||||
|
||||
const dayError = validateRange(dayOfMonth, 1, 31, 'Day');
|
||||
if (dayError) return { valid: false, error: dayError };
|
||||
|
||||
const monthError = validateRange(month, 1, 12, 'Month');
|
||||
if (monthError) return { valid: false, error: monthError };
|
||||
|
||||
const weekdayError = validateRange(dayOfWeek, 0, 6, 'Weekday');
|
||||
if (weekdayError) return { valid: false, error: weekdayError };
|
||||
|
||||
return { valid: true, error: null };
|
||||
}
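// Sample results from validateCronExpression as written above (the regex accepts
// *, steps, ranges and lists; the extra range check only applies to bare numbers):
validateCronExpression('0 3 * * *');    // { valid: true, error: null }  -> daily at 03:00
validateCronExpression('*/15 * * * *'); // { valid: true, error: null }  -> every 15 minutes
validateCronExpression('0 25 * * *');   // { valid: false, error: 'Hour must be between 0 and 23' }
validateCronExpression('0 3 * *');      // { valid: false, error: 'Cron expression must have exactly 5 parts: minute hour day month weekday' }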
|
||||
|
||||
const DAYS_OF_WEEK = [
|
||||
{ value: '0', label: 'Sunday' },
|
||||
{ value: '1', label: 'Monday' },
|
||||
{ value: '2', label: 'Tuesday' },
|
||||
{ value: '3', label: 'Wednesday' },
|
||||
{ value: '4', label: 'Thursday' },
|
||||
{ value: '5', label: 'Friday' },
|
||||
{ value: '6', label: 'Saturday' },
|
||||
];
|
||||
|
||||
function formatBytes(bytes) {
|
||||
if (bytes === 0) return '0 B';
|
||||
const k = 1024;
|
||||
const sizes = ['B', 'KB', 'MB', 'GB'];
|
||||
const i = Math.floor(Math.log(bytes) / Math.log(k));
|
||||
return `${(bytes / Math.pow(k, i)).toFixed(2)} ${sizes[i]}`;
|
||||
}
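// Sample outputs from formatBytes as written above:
formatBytes(0);          // '0 B'
formatBytes(1536);       // '1.50 KB'
formatBytes(10485760);   // '10.00 MB'
formatBytes(3221225472); // '3.00 GB'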
|
||||
|
||||
export default function BackupManager() {
|
||||
const [backups, setBackups] = useState([]);
|
||||
const [loading, setLoading] = useState(false);
|
||||
const [creating, setCreating] = useState(false);
|
||||
const [downloading, setDownloading] = useState(null);
|
||||
const [uploadFile, setUploadFile] = useState(null);
|
||||
const [uploadModalOpen, setUploadModalOpen] = useState(false);
|
||||
const [restoreConfirmOpen, setRestoreConfirmOpen] = useState(false);
|
||||
const [deleteConfirmOpen, setDeleteConfirmOpen] = useState(false);
|
||||
const [selectedBackup, setSelectedBackup] = useState(null);
|
||||
const [restoring, setRestoring] = useState(false);
|
||||
const [deleting, setDeleting] = useState(false);
|
||||
|
||||
// Read user's preferences from settings
|
||||
const [timeFormat] = useLocalStorage('time-format', '12h');
|
||||
const [dateFormatSetting] = useLocalStorage('date-format', 'mdy');
|
||||
const [tableSize] = useLocalStorage('table-size', 'default');
|
||||
const [userTimezone] = useLocalStorage('time-zone', getDefaultTimeZone());
|
||||
const is12Hour = timeFormat === '12h';
|
||||
|
||||
// Format date according to user preferences
|
||||
const formatDate = (dateString) => {
|
||||
const date = dayjs(dateString);
|
||||
const datePart = dateFormatSetting === 'mdy' ? 'MM/DD/YYYY' : 'DD/MM/YYYY';
|
||||
const timePart = is12Hour ? 'h:mm:ss A' : 'HH:mm:ss';
|
||||
return date.format(`${datePart}, ${timePart}`);
|
||||
};
|
||||
|
||||
// Warning suppression for confirmation dialogs
|
||||
const suppressWarning = useWarningsStore((s) => s.suppressWarning);
|
||||
|
||||
// Schedule state
|
||||
const [schedule, setSchedule] = useState({
|
||||
enabled: false,
|
||||
frequency: 'daily',
|
||||
time: '03:00',
|
||||
day_of_week: 0,
|
||||
retention_count: 0,
|
||||
cron_expression: '',
|
||||
});
|
||||
const [scheduleLoading, setScheduleLoading] = useState(false);
|
||||
const [scheduleSaving, setScheduleSaving] = useState(false);
|
||||
const [scheduleChanged, setScheduleChanged] = useState(false);
|
||||
const [advancedMode, setAdvancedMode] = useState(false);
|
||||
const [cronError, setCronError] = useState(null);
|
||||
|
||||
// For 12-hour display mode
|
||||
const [displayTime, setDisplayTime] = useState('3:00');
|
||||
const [timePeriod, setTimePeriod] = useState('AM');
|
||||
|
||||
const columns = useMemo(
|
||||
() => [
|
||||
{
|
||||
header: 'Filename',
|
||||
accessorKey: 'name',
|
||||
grow: true,
|
||||
cell: ({ cell }) => (
|
||||
<div
|
||||
style={{
|
||||
whiteSpace: 'nowrap',
|
||||
overflow: 'hidden',
|
||||
textOverflow: 'ellipsis',
|
||||
}}
|
||||
>
|
||||
{cell.getValue()}
|
||||
</div>
|
||||
),
|
||||
},
|
||||
{
|
||||
header: 'Size',
|
||||
accessorKey: 'size',
|
||||
size: 80,
|
||||
cell: ({ cell }) => (
|
||||
<Text size="sm">{formatBytes(cell.getValue())}</Text>
|
||||
),
|
||||
},
|
||||
{
|
||||
header: 'Created',
|
||||
accessorKey: 'created',
|
||||
minSize: 180,
|
||||
cell: ({ cell }) => (
|
||||
<Text size="sm" style={{ whiteSpace: 'nowrap' }}>
|
||||
{formatDate(cell.getValue())}
|
||||
</Text>
|
||||
),
|
||||
},
|
||||
{
|
||||
id: 'actions',
|
||||
header: 'Actions',
|
||||
size: tableSize === 'compact' ? 75 : 100,
|
||||
},
|
||||
],
|
||||
[tableSize]
|
||||
);
|
||||
|
||||
const renderHeaderCell = (header) => {
|
||||
return (
|
||||
<Text size="sm" name={header.id}>
|
||||
{header.column.columnDef.header}
|
||||
</Text>
|
||||
);
|
||||
};
|
||||
|
||||
const renderBodyCell = ({ cell, row }) => {
|
||||
switch (cell.column.id) {
|
||||
case 'actions':
|
||||
return (
|
||||
<RowActions
|
||||
row={row}
|
||||
handleDownload={handleDownload}
|
||||
handleRestoreClick={handleRestoreClick}
|
||||
handleDeleteClick={handleDeleteClick}
|
||||
downloading={downloading}
|
||||
/>
|
||||
);
|
||||
}
|
||||
};
|
||||
|
||||
const table = useTable({
|
||||
columns,
|
||||
data: backups,
|
||||
allRowIds: backups.map((b) => b.name),
|
||||
bodyCellRenderFns: {
|
||||
actions: renderBodyCell,
|
||||
},
|
||||
headerCellRenderFns: {
|
||||
name: renderHeaderCell,
|
||||
size: renderHeaderCell,
|
||||
created: renderHeaderCell,
|
||||
actions: renderHeaderCell,
|
||||
},
|
||||
});
|
||||
|
||||
const loadBackups = async () => {
|
||||
setLoading(true);
|
||||
try {
|
||||
const backupList = await API.listBackups();
|
||||
setBackups(backupList);
|
||||
} catch (error) {
|
||||
notifications.show({
|
||||
title: 'Error',
|
||||
message: error?.message || 'Failed to load backups',
|
||||
color: 'red',
|
||||
});
|
||||
} finally {
|
||||
setLoading(false);
|
||||
}
|
||||
};
|
||||
|
||||
const loadSchedule = async () => {
|
||||
setScheduleLoading(true);
|
||||
try {
|
||||
const settings = await API.getBackupSchedule();
|
||||
|
||||
// Check if using cron expression (advanced mode)
|
||||
if (settings.cron_expression) {
|
||||
setAdvancedMode(true);
|
||||
}
|
||||
|
||||
setSchedule(settings);
|
||||
|
||||
// Initialize 12-hour display values
|
||||
const { time, period } = to12Hour(settings.time);
|
||||
setDisplayTime(time);
|
||||
setTimePeriod(period);
|
||||
|
||||
setScheduleChanged(false);
|
||||
} catch (error) {
|
||||
// Ignore errors on initial load - settings may not exist yet
|
||||
} finally {
|
||||
setScheduleLoading(false);
|
||||
}
|
||||
};
|
||||
|
||||
useEffect(() => {
|
||||
loadBackups();
|
||||
loadSchedule();
|
||||
}, []);
|
||||
|
||||
// Validate cron expression when switching to advanced mode
|
||||
useEffect(() => {
|
||||
if (advancedMode && schedule.cron_expression) {
|
||||
const validation = validateCronExpression(schedule.cron_expression);
|
||||
setCronError(validation.valid ? null : validation.error);
|
||||
} else {
|
||||
setCronError(null);
|
||||
}
|
||||
}, [advancedMode, schedule.cron_expression]);
|
||||
|
||||
const handleScheduleChange = (field, value) => {
|
||||
setSchedule((prev) => ({ ...prev, [field]: value }));
|
||||
setScheduleChanged(true);
|
||||
|
||||
// Validate cron expression if in advanced mode
|
||||
if (field === 'cron_expression' && advancedMode) {
|
||||
const validation = validateCronExpression(value);
|
||||
setCronError(validation.valid ? null : validation.error);
|
||||
}
|
||||
};
|
||||
|
||||
// Handle time changes in 12-hour mode
|
||||
const handleTimeChange12h = (newTime, newPeriod) => {
|
||||
const time = newTime ?? displayTime;
|
||||
const period = newPeriod ?? timePeriod;
|
||||
setDisplayTime(time);
|
||||
setTimePeriod(period);
|
||||
// Convert to 24h and update schedule
|
||||
const time24 = to24Hour(time, period);
|
||||
handleScheduleChange('time', time24);
|
||||
};
|
||||
|
||||
// Handle time changes in 24-hour mode
|
||||
const handleTimeChange24h = (value) => {
|
||||
handleScheduleChange('time', value);
|
||||
// Also update 12h display state in case user switches formats
|
||||
const { time, period } = to12Hour(value);
|
||||
setDisplayTime(time);
|
||||
setTimePeriod(period);
|
||||
};
|
||||
|
||||
const handleSaveSchedule = async () => {
|
||||
setScheduleSaving(true);
|
||||
try {
|
||||
const scheduleToSave = advancedMode
|
||||
? schedule
|
||||
: { ...schedule, cron_expression: '' };
|
||||
|
||||
const updated = await API.updateBackupSchedule(scheduleToSave);
|
||||
setSchedule(updated);
|
||||
setScheduleChanged(false);
|
||||
|
||||
notifications.show({
|
||||
title: 'Success',
|
||||
message: 'Backup schedule saved',
|
||||
color: 'green',
|
||||
});
|
||||
} catch (error) {
|
||||
notifications.show({
|
||||
title: 'Error',
|
||||
message: error?.message || 'Failed to save schedule',
|
||||
color: 'red',
|
||||
});
|
||||
} finally {
|
||||
setScheduleSaving(false);
|
||||
}
|
||||
};
|
||||
|
||||
const handleCreateBackup = async () => {
|
||||
setCreating(true);
|
||||
try {
|
||||
await API.createBackup();
|
||||
notifications.show({
|
||||
title: 'Success',
|
||||
message: 'Backup created successfully',
|
||||
color: 'green',
|
||||
});
|
||||
await loadBackups();
|
||||
} catch (error) {
|
||||
notifications.show({
|
||||
title: 'Error',
|
||||
message: error?.message || 'Failed to create backup',
|
||||
color: 'red',
|
||||
});
|
||||
} finally {
|
||||
setCreating(false);
|
||||
}
|
||||
};
|
||||
|
||||
const handleDownload = async (filename) => {
|
||||
setDownloading(filename);
|
||||
try {
|
||||
await API.downloadBackup(filename);
|
||||
notifications.show({
|
||||
title: 'Download Started',
|
||||
message: `Downloading ${filename}...`,
|
||||
color: 'blue',
|
||||
});
|
||||
} catch (error) {
|
||||
notifications.show({
|
||||
title: 'Error',
|
||||
message: error?.message || 'Failed to download backup',
|
||||
color: 'red',
|
||||
});
|
||||
} finally {
|
||||
setDownloading(null);
|
||||
}
|
||||
};
|
||||
|
||||
const handleDeleteClick = (backup) => {
|
||||
setSelectedBackup(backup);
|
||||
setDeleteConfirmOpen(true);
|
||||
};
|
||||
|
||||
const handleDeleteConfirm = async () => {
|
||||
setDeleting(true);
|
||||
try {
|
||||
await API.deleteBackup(selectedBackup.name);
|
||||
notifications.show({
|
||||
title: 'Success',
|
||||
message: 'Backup deleted successfully',
|
||||
color: 'green',
|
||||
});
|
||||
await loadBackups();
|
||||
} catch (error) {
|
||||
notifications.show({
|
||||
title: 'Error',
|
||||
message: error?.message || 'Failed to delete backup',
|
||||
color: 'red',
|
||||
});
|
||||
} finally {
|
||||
setDeleting(false);
|
||||
setDeleteConfirmOpen(false);
|
||||
setSelectedBackup(null);
|
||||
}
|
||||
};
|
||||
|
||||
const handleRestoreClick = (backup) => {
|
||||
setSelectedBackup(backup);
|
||||
setRestoreConfirmOpen(true);
|
||||
};
|
||||
|
||||
const handleRestoreConfirm = async () => {
|
||||
setRestoring(true);
|
||||
try {
|
||||
await API.restoreBackup(selectedBackup.name);
|
||||
notifications.show({
|
||||
title: 'Success',
|
||||
message:
|
||||
'Backup restored successfully. You may need to refresh the page.',
|
||||
color: 'green',
|
||||
});
|
||||
setTimeout(() => window.location.reload(), 2000);
|
||||
} catch (error) {
|
||||
notifications.show({
|
||||
title: 'Error',
|
||||
message: error?.message || 'Failed to restore backup',
|
||||
color: 'red',
|
||||
});
|
||||
} finally {
|
||||
setRestoring(false);
|
||||
setRestoreConfirmOpen(false);
|
||||
setSelectedBackup(null);
|
||||
}
|
||||
};
|
||||
|
||||
const handleUploadSubmit = async () => {
|
||||
if (!uploadFile) return;
|
||||
|
||||
try {
|
||||
await API.uploadBackup(uploadFile);
|
||||
notifications.show({
|
||||
title: 'Success',
|
||||
message: 'Backup uploaded successfully',
|
||||
color: 'green',
|
||||
});
|
||||
setUploadModalOpen(false);
|
||||
setUploadFile(null);
|
||||
await loadBackups();
|
||||
} catch (error) {
|
||||
notifications.show({
|
||||
title: 'Error',
|
||||
message: error?.message || 'Failed to upload backup',
|
||||
color: 'red',
|
||||
});
|
||||
}
|
||||
};
|
||||
|
||||
return (
|
||||
<Stack gap="md">
|
||||
{/* Schedule Settings */}
|
||||
<Stack gap="sm">
|
||||
<Group justify="space-between">
|
||||
<Text size="sm" fw={500}>
|
||||
Scheduled Backups
|
||||
</Text>
|
||||
<Switch
|
||||
checked={schedule.enabled}
|
||||
onChange={(e) =>
|
||||
handleScheduleChange('enabled', e.currentTarget.checked)
|
||||
}
|
||||
label={schedule.enabled ? 'Enabled' : 'Disabled'}
|
||||
/>
|
||||
</Group>
|
||||
|
||||
<Group justify="space-between">
|
||||
<Text size="sm" fw={500}>
|
||||
Advanced (Cron Expression)
|
||||
</Text>
|
||||
<Switch
|
||||
checked={advancedMode}
|
||||
onChange={(e) => setAdvancedMode(e.currentTarget.checked)}
|
||||
label={advancedMode ? 'Enabled' : 'Disabled'}
|
||||
disabled={!schedule.enabled}
|
||||
size="sm"
|
||||
/>
|
||||
</Group>
|
||||
|
||||
{scheduleLoading ? (
|
||||
<Loader size="sm" />
|
||||
) : (
|
||||
<>
|
||||
{advancedMode ? (
|
||||
<>
|
||||
<Stack gap="sm">
|
||||
<TextInput
|
||||
label="Cron Expression"
|
||||
value={schedule.cron_expression}
|
||||
onChange={(e) =>
|
||||
handleScheduleChange(
|
||||
'cron_expression',
|
||||
e.currentTarget.value
|
||||
)
|
||||
}
|
||||
placeholder="0 3 * * *"
|
||||
description="Format: minute hour day month weekday (e.g., '0 3 * * *' = 3:00 AM daily)"
|
||||
disabled={!schedule.enabled}
|
||||
error={cronError}
|
||||
/>
|
||||
<Text size="xs" c="dimmed">
|
||||
Examples: <br />• <code>0 3 * * *</code> - Every day at 3:00
|
||||
AM
|
||||
<br />• <code>0 2 * * 0</code> - Every Sunday at 2:00 AM
|
||||
<br />• <code>0 */6 * * *</code> - Every 6 hours
|
||||
<br />• <code>30 14 1 * *</code> - 1st of every month at
|
||||
2:30 PM
|
||||
</Text>
|
||||
</Stack>
|
||||
<Group grow align="flex-end">
|
||||
<NumberInput
|
||||
label="Retention"
|
||||
description="0 = keep all"
|
||||
value={schedule.retention_count}
|
||||
onChange={(value) =>
|
||||
handleScheduleChange('retention_count', value || 0)
|
||||
}
|
||||
min={0}
|
||||
disabled={!schedule.enabled}
|
||||
/>
|
||||
<Button
|
||||
onClick={handleSaveSchedule}
|
||||
loading={scheduleSaving}
|
||||
disabled={!scheduleChanged || (advancedMode && cronError)}
|
||||
variant="default"
|
||||
>
|
||||
Save
|
||||
</Button>
|
||||
</Group>
|
||||
</>
|
||||
) : (
|
||||
<Stack gap="sm">
|
||||
<Group align="flex-end" gap="xs" wrap="nowrap">
|
||||
<Select
|
||||
label="Frequency"
|
||||
value={schedule.frequency}
|
||||
onChange={(value) =>
|
||||
handleScheduleChange('frequency', value)
|
||||
}
|
||||
data={[
|
||||
{ value: 'daily', label: 'Daily' },
|
||||
{ value: 'weekly', label: 'Weekly' },
|
||||
]}
|
||||
disabled={!schedule.enabled}
|
||||
/>
|
||||
{schedule.frequency === 'weekly' && (
|
||||
<Select
|
||||
label="Day"
|
||||
value={String(schedule.day_of_week)}
|
||||
onChange={(value) =>
|
||||
handleScheduleChange('day_of_week', parseInt(value, 10))
|
||||
}
|
||||
data={DAYS_OF_WEEK}
|
||||
disabled={!schedule.enabled}
|
||||
/>
|
||||
)}
|
||||
{is12Hour ? (
|
||||
<>
|
||||
<Select
|
||||
label="Hour"
|
||||
value={displayTime ? displayTime.split(':')[0] : '12'}
|
||||
onChange={(value) => {
|
||||
const minute = displayTime
|
||||
? displayTime.split(':')[1]
|
||||
: '00';
|
||||
handleTimeChange12h(`${value}:${minute}`, null);
|
||||
}}
|
||||
data={Array.from({ length: 12 }, (_, i) => ({
|
||||
value: String(i + 1),
|
||||
label: String(i + 1),
|
||||
}))}
|
||||
disabled={!schedule.enabled}
|
||||
searchable
|
||||
/>
|
||||
<Select
|
||||
label="Minute"
|
||||
value={displayTime ? displayTime.split(':')[1] : '00'}
|
||||
onChange={(value) => {
|
||||
const hour = displayTime
|
||||
? displayTime.split(':')[0]
|
||||
: '12';
|
||||
handleTimeChange12h(`${hour}:${value}`, null);
|
||||
}}
|
||||
data={Array.from({ length: 60 }, (_, i) => ({
|
||||
value: String(i).padStart(2, '0'),
|
||||
label: String(i).padStart(2, '0'),
|
||||
}))}
|
||||
disabled={!schedule.enabled}
|
||||
searchable
|
||||
/>
|
||||
<Select
|
||||
label="Period"
|
||||
value={timePeriod}
|
||||
onChange={(value) => handleTimeChange12h(null, value)}
|
||||
data={[
|
||||
{ value: 'AM', label: 'AM' },
|
||||
{ value: 'PM', label: 'PM' },
|
||||
]}
|
||||
disabled={!schedule.enabled}
|
||||
/>
|
||||
</>
|
||||
) : (
|
||||
<>
|
||||
<Select
|
||||
label="Hour"
|
||||
value={
|
||||
schedule.time ? schedule.time.split(':')[0] : '00'
|
||||
}
|
||||
onChange={(value) => {
|
||||
const minute = schedule.time
|
||||
? schedule.time.split(':')[1]
|
||||
: '00';
|
||||
handleTimeChange24h(`${value}:${minute}`);
|
||||
}}
|
||||
data={Array.from({ length: 24 }, (_, i) => ({
|
||||
value: String(i).padStart(2, '0'),
|
||||
label: String(i).padStart(2, '0'),
|
||||
}))}
|
||||
disabled={!schedule.enabled}
|
||||
searchable
|
||||
/>
|
||||
<Select
|
||||
label="Minute"
|
||||
value={
|
||||
schedule.time ? schedule.time.split(':')[1] : '00'
|
||||
}
|
||||
onChange={(value) => {
|
||||
const hour = schedule.time
|
||||
? schedule.time.split(':')[0]
|
||||
: '00';
|
||||
handleTimeChange24h(`${hour}:${value}`);
|
||||
}}
|
||||
data={Array.from({ length: 60 }, (_, i) => ({
|
||||
value: String(i).padStart(2, '0'),
|
||||
label: String(i).padStart(2, '0'),
|
||||
}))}
|
||||
disabled={!schedule.enabled}
|
||||
searchable
|
||||
/>
|
||||
</>
|
||||
)}
|
||||
</Group>
|
||||
<Group grow align="flex-end" gap="xs">
|
||||
<NumberInput
|
||||
label="Retention"
|
||||
description="0 = keep all"
|
||||
value={schedule.retention_count}
|
||||
onChange={(value) =>
|
||||
handleScheduleChange('retention_count', value || 0)
|
||||
}
|
||||
min={0}
|
||||
disabled={!schedule.enabled}
|
||||
/>
|
||||
<Button
|
||||
onClick={handleSaveSchedule}
|
||||
loading={scheduleSaving}
|
||||
disabled={!scheduleChanged}
|
||||
variant="default"
|
||||
>
|
||||
Save
|
||||
</Button>
|
||||
</Group>
|
||||
</Stack>
|
||||
)}
|
||||
|
||||
{/* Timezone info - only show in simple mode */}
|
||||
{!advancedMode && schedule.enabled && schedule.time && (
|
||||
<Text size="xs" c="dimmed" mt="xs">
|
||||
System Timezone: {userTimezone} • Backup will run at{' '}
|
||||
{schedule.time} {userTimezone}
|
||||
</Text>
|
||||
)}
|
||||
</>
|
||||
)}
|
||||
</Stack>
|
||||
|
||||
{/* Backups List */}
|
||||
<Stack gap={0}>
|
||||
<Paper>
|
||||
<Box
|
||||
style={{
|
||||
display: 'flex',
|
||||
justifyContent: 'flex-end',
|
||||
padding: 10,
|
||||
}}
|
||||
>
|
||||
<Flex gap={6}>
|
||||
<Tooltip label="Upload existing backup">
|
||||
<Button
|
||||
leftSection={<UploadCloud size={18} />}
|
||||
variant="light"
|
||||
size="xs"
|
||||
onClick={() => setUploadModalOpen(true)}
|
||||
p={5}
|
||||
>
|
||||
Upload
|
||||
</Button>
|
||||
</Tooltip>
|
||||
<Tooltip label="Refresh list">
|
||||
<Button
|
||||
leftSection={<RefreshCcw size={18} />}
|
||||
variant="light"
|
||||
size="xs"
|
||||
onClick={loadBackups}
|
||||
loading={loading}
|
||||
p={5}
|
||||
>
|
||||
Refresh
|
||||
</Button>
|
||||
</Tooltip>
|
||||
<Tooltip label="Create new backup">
|
||||
<Button
|
||||
leftSection={<SquarePlus size={18} />}
|
||||
variant="light"
|
||||
size="xs"
|
||||
onClick={handleCreateBackup}
|
||||
loading={creating}
|
||||
p={5}
|
||||
color="green"
|
||||
style={{
|
||||
borderWidth: '1px',
|
||||
borderColor: 'green',
|
||||
color: 'white',
|
||||
}}
|
||||
>
|
||||
Create Backup
|
||||
</Button>
|
||||
</Tooltip>
|
||||
</Flex>
|
||||
</Box>
|
||||
</Paper>
|
||||
|
||||
<Box
|
||||
style={{
|
||||
display: 'flex',
|
||||
flexDirection: 'column',
|
||||
maxHeight: 300,
|
||||
width: '100%',
|
||||
overflow: 'hidden',
|
||||
}}
|
||||
>
|
||||
<Box
|
||||
style={{
|
||||
flex: 1,
|
||||
overflowY: 'auto',
|
||||
overflowX: 'auto',
|
||||
border: 'solid 1px rgb(68,68,68)',
|
||||
borderRadius: 'var(--mantine-radius-default)',
|
||||
}}
|
||||
>
|
||||
{loading ? (
|
||||
<Box p="xl" style={{ display: 'flex', justifyContent: 'center' }}>
|
||||
<Loader />
|
||||
</Box>
|
||||
) : backups.length === 0 ? (
|
||||
<Text size="sm" c="dimmed" p="md" ta="center">
|
||||
No backups found. Create one to get started.
|
||||
</Text>
|
||||
) : (
|
||||
<div style={{ minWidth: 500 }}>
|
||||
<CustomTable table={table} />
|
||||
</div>
|
||||
)}
|
||||
</Box>
|
||||
</Box>
|
||||
</Stack>
|
||||
|
||||
<Modal
|
||||
opened={uploadModalOpen}
|
||||
onClose={() => {
|
||||
setUploadModalOpen(false);
|
||||
setUploadFile(null);
|
||||
}}
|
||||
title="Upload Backup"
|
||||
>
|
||||
<Stack>
|
||||
<FileInput
|
||||
label="Select backup file"
|
||||
placeholder="Choose a .zip file"
|
||||
accept=".zip,application/zip,application/x-zip-compressed"
|
||||
value={uploadFile}
|
||||
onChange={setUploadFile}
|
||||
/>
|
||||
<Group justify="flex-end">
|
||||
<Button
|
||||
variant="outline"
|
||||
onClick={() => {
|
||||
setUploadModalOpen(false);
|
||||
setUploadFile(null);
|
||||
}}
|
||||
>
|
||||
Cancel
|
||||
</Button>
|
||||
<Button
|
||||
onClick={handleUploadSubmit}
|
||||
disabled={!uploadFile}
|
||||
variant="default"
|
||||
>
|
||||
Upload
|
||||
</Button>
|
||||
</Group>
|
||||
</Stack>
|
||||
</Modal>
|
||||
|
||||
<ConfirmationDialog
|
||||
opened={restoreConfirmOpen}
|
||||
onClose={() => {
|
||||
setRestoreConfirmOpen(false);
|
||||
setSelectedBackup(null);
|
||||
}}
|
||||
onConfirm={handleRestoreConfirm}
|
||||
title="Restore Backup"
|
||||
message={`Are you sure you want to restore from "${selectedBackup?.name}"? This will replace all current data with the backup data. This action cannot be undone.`}
|
||||
confirmLabel="Restore"
|
||||
cancelLabel="Cancel"
|
||||
actionKey="restore-backup"
|
||||
onSuppressChange={suppressWarning}
|
||||
loading={restoring}
|
||||
/>
|
||||
|
||||
<ConfirmationDialog
|
||||
opened={deleteConfirmOpen}
|
||||
onClose={() => {
|
||||
setDeleteConfirmOpen(false);
|
||||
setSelectedBackup(null);
|
||||
}}
|
||||
onConfirm={handleDeleteConfirm}
|
||||
title="Delete Backup"
|
||||
message={`Are you sure you want to delete "${selectedBackup?.name}"? This action cannot be undone.`}
|
||||
confirmLabel="Delete"
|
||||
cancelLabel="Cancel"
|
||||
actionKey="delete-backup"
|
||||
onSuppressChange={suppressWarning}
|
||||
loading={deleting}
|
||||
/>
|
||||
</Stack>
|
||||
);
|
||||
}

frontend/src/components/cards/PluginCard.jsx (new file, 258 lines)
@@ -0,0 +1,258 @@
import React, { useState } from 'react';
|
||||
import { showNotification } from '../../utils/notificationUtils.js';
|
||||
import { Field } from '../Field.jsx';
|
||||
import {
|
||||
ActionIcon,
|
||||
Button,
|
||||
Card,
|
||||
Divider,
|
||||
Group,
|
||||
Stack,
|
||||
Switch,
|
||||
Text,
|
||||
} from '@mantine/core';
|
||||
import { Trash2 } from 'lucide-react';
|
||||
import { getConfirmationDetails } from '../../utils/cards/PluginCardUtils.js';
|
||||
|
||||
const PluginFieldList = ({ plugin, settings, updateField }) => {
|
||||
return plugin.fields.map((f) => (
|
||||
<Field
|
||||
key={f.id}
|
||||
field={f}
|
||||
value={settings?.[f.id]}
|
||||
onChange={updateField}
|
||||
/>
|
||||
));
|
||||
};
|
||||
|
||||
const PluginActionList = ({ plugin, enabled, running, handlePluginRun }) => {
|
||||
return plugin.actions.map((action) => (
|
||||
<Group key={action.id} justify="space-between">
|
||||
<div>
|
||||
<Text>{action.label}</Text>
|
||||
{action.description && (
|
||||
<Text size="sm" c="dimmed">
|
||||
{action.description}
|
||||
</Text>
|
||||
)}
|
||||
</div>
|
||||
<Button
|
||||
loading={running}
|
||||
disabled={!enabled}
|
||||
onClick={() => handlePluginRun(action)}
|
||||
size="xs"
|
||||
>
|
||||
{running ? 'Running…' : 'Run'}
|
||||
</Button>
|
||||
</Group>
|
||||
));
|
||||
};
|
||||
|
||||
const PluginActionStatus = ({ running, lastResult }) => {
|
||||
return (
|
||||
<>
|
||||
{running && (
|
||||
<Text size="sm" c="dimmed">
|
||||
Running action… please wait
|
||||
</Text>
|
||||
)}
|
||||
{!running && lastResult?.file && (
|
||||
<Text size="sm" c="dimmed">
|
||||
Output: {lastResult.file}
|
||||
</Text>
|
||||
)}
|
||||
{!running && lastResult?.error && (
|
||||
<Text size="sm" c="red">
|
||||
Error: {String(lastResult.error)}
|
||||
</Text>
|
||||
)}
|
||||
</>
|
||||
);
|
||||
};
|
||||
|
||||
const PluginCard = ({
|
||||
plugin,
|
||||
onSaveSettings,
|
||||
onRunAction,
|
||||
onToggleEnabled,
|
||||
onRequireTrust,
|
||||
onRequestDelete,
|
||||
onRequestConfirm,
|
||||
}) => {
|
||||
const [settings, setSettings] = useState(plugin.settings || {});
|
||||
const [saving, setSaving] = useState(false);
|
||||
const [running, setRunning] = useState(false);
|
||||
const [enabled, setEnabled] = useState(!!plugin.enabled);
|
||||
const [lastResult, setLastResult] = useState(null);
|
||||
|
||||
// Keep local enabled state in sync with props (e.g., after import + enable)
|
||||
React.useEffect(() => {
|
||||
setEnabled(!!plugin.enabled);
|
||||
}, [plugin.enabled]);
|
||||
// Sync settings if plugin changes identity
|
||||
React.useEffect(() => {
|
||||
setSettings(plugin.settings || {});
|
||||
}, [plugin.key]);
|
||||
|
||||
const updateField = (id, val) => {
|
||||
setSettings((prev) => ({ ...prev, [id]: val }));
|
||||
};
|
||||
|
||||
const save = async () => {
|
||||
setSaving(true);
|
||||
try {
|
||||
await onSaveSettings(plugin.key, settings);
|
||||
showNotification({
|
||||
title: 'Saved',
|
||||
message: `${plugin.name} settings updated`,
|
||||
color: 'green',
|
||||
});
|
||||
} finally {
|
||||
setSaving(false);
|
||||
}
|
||||
};
|
||||
|
||||
const missing = plugin.missing;
|
||||
|
||||
const handleEnableChange = () => {
|
||||
return async (e) => {
|
||||
const next = e.currentTarget.checked;
|
||||
if (next && !plugin.ever_enabled && onRequireTrust) {
|
||||
const ok = await onRequireTrust(plugin);
|
||||
if (!ok) {
|
||||
// Revert
|
||||
setEnabled(false);
|
||||
return;
|
||||
}
|
||||
}
|
||||
setEnabled(next);
|
||||
const resp = await onToggleEnabled(plugin.key, next);
|
||||
if (next && resp?.ever_enabled) {
|
||||
plugin.ever_enabled = true;
|
||||
}
|
||||
};
|
||||
};
|
||||
|
||||
const handlePluginRun = async (a) => {
|
||||
setRunning(true);
|
||||
setLastResult(null);
|
||||
try {
|
||||
// Determine if confirmation is required from action metadata or fallback field
|
||||
const { requireConfirm, confirmTitle, confirmMessage } =
|
||||
getConfirmationDetails(a, plugin, settings);
|
||||
|
||||
if (requireConfirm) {
|
||||
const confirmed = await onRequestConfirm(confirmTitle, confirmMessage);
|
||||
|
||||
if (!confirmed) {
|
||||
// User canceled, abort the action
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
// Save settings before running to ensure backend uses latest values
|
||||
try {
|
||||
await onSaveSettings(plugin.key, settings);
|
||||
} catch (e) {
|
||||
/* ignore, run anyway */
|
||||
}
|
||||
const resp = await onRunAction(plugin.key, a.id);
|
||||
if (resp?.success) {
|
||||
setLastResult(resp.result || {});
|
||||
const msg = resp.result?.message || 'Plugin action completed';
|
||||
showNotification({
|
||||
title: plugin.name,
|
||||
message: msg,
|
||||
color: 'green',
|
||||
});
|
||||
} else {
|
||||
const err = resp?.error || 'Unknown error';
|
||||
setLastResult({ error: err });
|
||||
showNotification({
|
||||
title: `${plugin.name} error`,
|
||||
message: String(err),
|
||||
color: 'red',
|
||||
});
|
||||
}
|
||||
} finally {
|
||||
setRunning(false);
|
||||
}
|
||||
};
|
||||
|
||||
return (
|
||||
<Card
|
||||
shadow="sm"
|
||||
radius="md"
|
||||
withBorder
|
||||
opacity={!missing && enabled ? 1 : 0.6}
|
||||
>
|
||||
<Group justify="space-between" mb="xs" align="center">
|
||||
<div>
|
||||
<Text fw={600}>{plugin.name}</Text>
|
||||
<Text size="sm" c="dimmed">
|
||||
{plugin.description}
|
||||
</Text>
|
||||
</div>
|
||||
<Group gap="xs" align="center">
|
||||
<ActionIcon
|
||||
variant="subtle"
|
||||
color="red"
|
||||
title="Delete plugin"
|
||||
onClick={() => onRequestDelete && onRequestDelete(plugin)}
|
||||
>
|
||||
<Trash2 size={16} />
|
||||
</ActionIcon>
|
||||
<Text size="xs" c="dimmed">
|
||||
v{plugin.version || '1.0.0'}
|
||||
</Text>
|
||||
<Switch
|
||||
checked={!missing && enabled}
|
||||
onChange={handleEnableChange()}
|
||||
size="xs"
|
||||
onLabel="On"
|
||||
offLabel="Off"
|
||||
disabled={missing}
|
||||
/>
|
||||
</Group>
|
||||
</Group>
|
||||
|
||||
{missing && (
|
||||
<Text size="sm" c="red">
|
||||
Missing plugin files. Re-import or delete this entry.
|
||||
</Text>
|
||||
)}
|
||||
|
||||
{!missing && plugin.fields && plugin.fields.length > 0 && (
|
||||
<Stack gap="xs" mt="sm">
|
||||
<PluginFieldList
|
||||
plugin={plugin}
|
||||
settings={settings}
|
||||
updateField={updateField}
|
||||
/>
|
||||
<Group>
|
||||
<Button loading={saving} onClick={save} variant="default" size="xs">
|
||||
Save Settings
|
||||
</Button>
|
||||
</Group>
|
||||
</Stack>
|
||||
)}
|
||||
|
||||
{!missing && plugin.actions && plugin.actions.length > 0 && (
|
||||
<>
|
||||
<Divider my="sm" />
|
||||
<Stack gap="xs">
|
||||
<PluginActionList
|
||||
plugin={plugin}
|
||||
enabled={enabled}
|
||||
running={running}
|
||||
handlePluginRun={handlePluginRun}
|
||||
/>
|
||||
<PluginActionStatus running={running} lastResult={lastResult} />
|
||||
</Stack>
|
||||
</>
|
||||
)}
|
||||
</Card>
|
||||
);
|
||||
};
|
||||
|
||||
export default PluginCard;

frontend/src/components/cards/RecordingCard.jsx (new file, 422 lines)
@@ -0,0 +1,422 @@
import useChannelsStore from '../../store/channels.jsx';
|
||||
import useSettingsStore from '../../store/settings.jsx';
|
||||
import useVideoStore from '../../store/useVideoStore.jsx';
|
||||
import { useDateTimeFormat, useTimeHelpers } from '../../utils/dateTimeUtils.js';
|
||||
import { notifications } from '@mantine/notifications';
|
||||
import React from 'react';
|
||||
import {
|
||||
ActionIcon,
|
||||
Badge,
|
||||
Box,
|
||||
Button,
|
||||
Card,
|
||||
Center,
|
||||
Flex,
|
||||
Group,
|
||||
Image,
|
||||
Modal,
|
||||
Stack,
|
||||
Text,
|
||||
Tooltip,
|
||||
} from '@mantine/core';
|
||||
import { AlertTriangle, SquareX } from 'lucide-react';
|
||||
import RecordingSynopsis from '../RecordingSynopsis';
|
||||
import {
|
||||
deleteRecordingById,
|
||||
  deleteSeriesAndRule,
  getPosterUrl,
  getRecordingUrl,
  getSeasonLabel,
  getSeriesInfo,
  getShowVideoUrl,
  removeRecording,
  runComSkip,
} from './../../utils/cards/RecordingCardUtils.js';

const RecordingCard = ({ recording, onOpenDetails, onOpenRecurring }) => {
  const channels = useChannelsStore((s) => s.channels);
  const env_mode = useSettingsStore((s) => s.environment.env_mode);
  const showVideo = useVideoStore((s) => s.showVideo);
  const fetchRecordings = useChannelsStore((s) => s.fetchRecordings);
  const { toUserTime, userNow } = useTimeHelpers();
  const [timeformat, dateformat] = useDateTimeFormat();

  const channel = channels?.[recording.channel];

  const customProps = recording.custom_properties || {};
  const program = customProps.program || {};
  const recordingName = program.title || 'Custom Recording';
  const subTitle = program.sub_title || '';
  const description = program.description || customProps.description || '';
  const isRecurringRule = customProps?.rule?.type === 'recurring';

  // Poster or channel logo
  const posterUrl = getPosterUrl(
    customProps.poster_logo_id, customProps, channel?.logo?.cache_url, env_mode);

  const start = toUserTime(recording.start_time);
  const end = toUserTime(recording.end_time);
  const now = userNow();
  const status = customProps.status;
  const isTimeActive = now.isAfter(start) && now.isBefore(end);
  const isInterrupted = status === 'interrupted';
  const isInProgress = isTimeActive; // Show as recording by time, regardless of status glitches
  const isUpcoming = now.isBefore(start);
  const isSeriesGroup = Boolean(
    recording._group_count && recording._group_count > 1
  );
  // Season/Episode display if present
  const season = customProps.season ?? program?.custom_properties?.season;
  const episode = customProps.episode ?? program?.custom_properties?.episode;
  const onscreen =
    customProps.onscreen_episode ??
    program?.custom_properties?.onscreen_episode;
  const seLabel = getSeasonLabel(season, episode, onscreen);

  const handleWatchLive = () => {
    if (!channel) return;
    showVideo(getShowVideoUrl(channel, env_mode), 'live');
  };

  const handleWatchRecording = () => {
    // Only enable if backend provides a playable file URL in custom properties
    const fileUrl = getRecordingUrl(customProps, env_mode);
    if (!fileUrl) return;

    showVideo(fileUrl, 'vod', {
      name: recordingName,
      logo: { url: posterUrl },
    });
  };

  const handleRunComskip = async (e) => {
    e?.stopPropagation?.();
    try {
      await runComSkip(recording);
      notifications.show({
        title: 'Removing commercials',
        message: 'Queued comskip for this recording',
        color: 'blue.5',
        autoClose: 2000,
      });
    } catch (error) {
      console.error('Failed to queue comskip for recording', error);
    }
  };

  // Cancel handling for series groups
  const [cancelOpen, setCancelOpen] = React.useState(false);
  const [busy, setBusy] = React.useState(false);
  const handleCancelClick = (e) => {
    e.stopPropagation();
    if (isRecurringRule) {
      onOpenRecurring?.(recording, true);
      return;
    }
    if (isSeriesGroup) {
      setCancelOpen(true);
    } else {
      removeRecording(recording.id);
    }
  };

  const seriesInfo = getSeriesInfo(customProps);

  const removeUpcomingOnly = async () => {
    try {
      setBusy(true);
      await deleteRecordingById(recording.id);
    } finally {
      setBusy(false);
      setCancelOpen(false);
      try {
        await fetchRecordings();
      } catch (error) {
        console.error('Failed to refresh recordings', error);
      }
    }
  };

  const removeSeriesAndRule = async () => {
    try {
      setBusy(true);
      await deleteSeriesAndRule(seriesInfo);
    } finally {
      setBusy(false);
      setCancelOpen(false);
      try {
        await fetchRecordings();
      } catch (error) {
        console.error(
          'Failed to refresh recordings after series removal',
          error
        );
      }
    }
  };

  const handleOnMainCardClick = () => {
    if (isRecurringRule) {
      onOpenRecurring?.(recording, false);
    } else {
      onOpenDetails?.(recording);
    }
  };

  const WatchLive = () => {
    return (
      <Button
        size="xs"
        variant="light"
        onClick={(e) => {
          e.stopPropagation();
          handleWatchLive();
        }}
      >
        Watch Live
      </Button>
    );
  };

  const WatchRecording = () => {
    return (
      <Tooltip
        label={
          customProps.file_url || customProps.output_file_url
            ? 'Watch recording'
            : 'Recording playback not available yet'
        }
      >
        <Button
          size="xs"
          variant="default"
          onClick={(e) => {
            e.stopPropagation();
            handleWatchRecording();
          }}
          disabled={
            customProps.status === 'recording' ||
            !(customProps.file_url || customProps.output_file_url)
          }
        >
          Watch
        </Button>
      </Tooltip>
    );
  };

  const MainCard = (
    <Card
      shadow="sm"
      padding="md"
      radius="md"
      withBorder
      style={{
        color: '#fff',
        backgroundColor: isInterrupted ? '#2b1f20' : '#27272A',
        borderColor: isInterrupted ? '#a33' : undefined,
        height: '100%',
        cursor: 'pointer',
      }}
      onClick={handleOnMainCardClick}
    >
      <Flex justify="space-between" align="center" pb={8}>
        <Group gap={8} flex={1} miw={0}>
          <Badge
            color={
              isInterrupted
                ? 'red.7'
                : isInProgress
                  ? 'red.6'
                  : isUpcoming
                    ? 'yellow.6'
                    : 'gray.6'
            }
          >
            {isInterrupted
              ? 'Interrupted'
              : isInProgress
                ? 'Recording'
                : isUpcoming
                  ? 'Scheduled'
                  : 'Completed'}
          </Badge>
          {isInterrupted && <AlertTriangle size={16} color="#ffa94d" />}
          <Stack gap={2} flex={1} miw={0}>
            <Group gap={8} wrap="nowrap">
              <Text fw={600} lineClamp={1} title={recordingName}>
                {recordingName}
              </Text>
              {isSeriesGroup && (
                <Badge color="teal" variant="filled">
                  Series
                </Badge>
              )}
              {isRecurringRule && (
                <Badge color="blue" variant="light">
                  Recurring
                </Badge>
              )}
              {seLabel && !isSeriesGroup && (
                <Badge color="gray" variant="light">
                  {seLabel}
                </Badge>
              )}
            </Group>
          </Stack>
        </Group>

        <Center>
          <Tooltip label={isUpcoming || isInProgress ? 'Cancel' : 'Delete'}>
            <ActionIcon
              variant="transparent"
              color="red.9"
              onMouseDown={(e) => e.stopPropagation()}
              onClick={handleCancelClick}
            >
              <SquareX size="20" />
            </ActionIcon>
          </Tooltip>
        </Center>
      </Flex>

      <Flex gap="sm" align="center">
        <Image
          src={posterUrl}
          w={64}
          h={64}
          fit="contain"
          radius="sm"
          alt={recordingName}
          fallbackSrc="/logo.png"
        />
        <Stack gap={6} flex={1}>
          {!isSeriesGroup && subTitle && (
            <Group justify="space-between">
              <Text size="sm" c="dimmed">
                Episode
              </Text>
              <Text size="sm" fw={700} title={subTitle}>
                {subTitle}
              </Text>
            </Group>
          )}
          <Group justify="space-between">
            <Text size="sm" c="dimmed">
              Channel
            </Text>
            <Text size="sm">
              {channel ? `${channel.channel_number} • ${channel.name}` : '—'}
            </Text>
          </Group>

          <Group justify="space-between">
            <Text size="sm" c="dimmed">
              {isSeriesGroup ? 'Next recording' : 'Time'}
            </Text>
            <Text size="sm">
              {start.format(`${dateformat}, YYYY ${timeformat}`)} – {end.format(timeformat)}
            </Text>
          </Group>

          {!isSeriesGroup && description && (
            <RecordingSynopsis
              description={description}
              onOpen={() => onOpenDetails?.(recording)}
            />
          )}

          {isInterrupted && customProps.interrupted_reason && (
            <Text size="xs" c="red.4">
              {customProps.interrupted_reason}
            </Text>
          )}

          <Group justify="flex-end" gap="xs" pt={4}>
            {isInProgress && <WatchLive />}

            {!isUpcoming && <WatchRecording />}
            {!isUpcoming &&
              customProps?.status === 'completed' &&
              (!customProps?.comskip ||
                customProps?.comskip?.status !== 'completed') && (
                <Button
                  size="xs"
                  variant="light"
                  color="teal"
                  onClick={handleRunComskip}
                >
                  Remove commercials
                </Button>
              )}
          </Group>
        </Stack>
      </Flex>
      {/* If this card is a grouped upcoming series, show count */}
      {recording._group_count > 1 && (
        <Text
          size="xs"
          c="dimmed"
          style={{ position: 'absolute', bottom: 6, right: 12 }}
        >
          Next of {recording._group_count}
        </Text>
      )}
    </Card>
  );

  if (!isSeriesGroup) return MainCard;

  // Stacked look for series groups: render two shadow layers behind the main card
  return (
    <Box style={{ position: 'relative' }}>
      <Modal
        opened={cancelOpen}
        onClose={() => setCancelOpen(false)}
        title="Cancel Series"
        centered
        size="md"
        zIndex={9999}
      >
        <Stack gap="sm">
          <Text>This is a series rule. What would you like to cancel?</Text>
          <Group justify="flex-end">
            <Button
              variant="default"
              loading={busy}
              onClick={removeUpcomingOnly}
            >
              Only this upcoming
            </Button>
            <Button color="red" loading={busy} onClick={removeSeriesAndRule}>
              Entire series + rule
            </Button>
          </Group>
        </Stack>
      </Modal>
      <Box
        style={{
          position: 'absolute',
          inset: 0,
          transform: 'translate(10px, 10px) rotate(-1deg)',
          borderRadius: 12,
          backgroundColor: '#1f1f23',
          border: '1px solid #2f2f34',
          boxShadow: '0 6px 18px rgba(0,0,0,0.35)',
          pointerEvents: 'none',
          zIndex: 0,
        }}
      />
      <Box
        style={{
          position: 'absolute',
          inset: 0,
          transform: 'translate(5px, 5px) rotate(1deg)',
          borderRadius: 12,
          backgroundColor: '#232327',
          border: '1px solid #333',
          boxShadow: '0 4px 12px rgba(0,0,0,0.30)',
          pointerEvents: 'none',
          zIndex: 1,
        }}
      />
      <Box style={{ position: 'relative', zIndex: 2 }}>{MainCard}</Box>
    </Box>
  );
};

export default RecordingCard;
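The status badge above is driven purely by the recording's time window plus the 'interrupted' flag. As a rough illustration (not part of this diff), the same derivation could be lifted into a plain helper for unit testing; the function name is hypothetical, and dayjs-style objects are assumed, matching what toUserTime/userNow appear to return.

// Sketch only: mirrors the badge logic in RecordingCard so it can be tested in isolation.
import dayjs from 'dayjs';

export function deriveRecordingState({ start_time, end_time, status }, now = dayjs()) {
  const start = dayjs(start_time);
  const end = dayjs(end_time);
  const isTimeActive = now.isAfter(start) && now.isBefore(end);

  if (status === 'interrupted') return 'Interrupted';
  if (isTimeActive) return 'Recording'; // time wins over transient status glitches
  if (now.isBefore(start)) return 'Scheduled';
  return 'Completed';
}

// deriveRecordingState(
//   { start_time: '2024-01-01T20:00', end_time: '2024-01-01T21:00', status: 'recording' },
//   dayjs('2024-01-01T20:30')
// ) === 'Recording'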
frontend/src/components/cards/SeriesCard.jsx (Normal file, 85 lines)
@@ -0,0 +1,85 @@
import {
  Badge,
  Box,
  Card,
  CardSection,
  Group,
  Image,
  Stack,
  Text,
} from '@mantine/core';
import { Calendar, Play, Star } from 'lucide-react';
import React from 'react';

const SeriesCard = ({ series, onClick }) => {
  return (
    <Card
      shadow="sm"
      padding="md"
      radius="md"
      withBorder
      style={{ cursor: 'pointer', backgroundColor: '#27272A' }}
      onClick={() => onClick(series)}
    >
      <CardSection>
        <Box pos="relative" h={300}>
          {series.logo?.url ? (
            <Image
              src={series.logo.url}
              height={300}
              alt={series.name}
              fit="contain"
            />
          ) : (
            <Box
              style={{
                backgroundColor: '#404040',
                alignItems: 'center',
                justifyContent: 'center',
              }}
              h={300}
              display="flex"
            >
              <Play size={48} color="#666" />
            </Box>
          )}
          {/* Add Series badge in the same position as Movie badge */}
          <Badge pos="absolute" bottom={8} left={8} color="purple">
            Series
          </Badge>
        </Box>
      </CardSection>

      <Stack spacing={8} mt="md">
        <Text weight={500}>{series.name}</Text>

        <Group spacing={16}>
          {series.year && (
            <Group spacing={4}>
              <Calendar size={14} color="#666" />
              <Text size="xs" c="dimmed">
                {series.year}
              </Text>
            </Group>
          )}
          {series.rating && (
            <Group spacing={4}>
              <Star size={14} color="#666" />
              <Text size="xs" c="dimmed">
                {series.rating}
              </Text>
            </Group>
          )}
        </Group>

        {series.genre && (
          <Text size="xs" c="dimmed" lineClamp={1}>
            {series.genre}
          </Text>
        )}
      </Stack>
    </Card>
  );
};

export default SeriesCard;
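SeriesCard stays purely presentational and hands navigation back through onClick(series). A hypothetical parent grid (not in this diff) might wire it up as below; seriesList and openSeriesDetails are assumed names.

// Illustrative parent component: the grid owns selection, the card only renders metadata.
import { SimpleGrid } from '@mantine/core';
import React from 'react';
import SeriesCard from './cards/SeriesCard.jsx';

const SeriesGrid = ({ seriesList, openSeriesDetails }) => (
  <SimpleGrid cols={4} spacing="md">
    {seriesList.map((series) => (
      <SeriesCard key={series.id} series={series} onClick={openSeriesDetails} />
    ))}
  </SimpleGrid>
);

export default SeriesGrid;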
frontend/src/components/cards/StreamConnectionCard.jsx (Normal file, 613 lines)
@@ -0,0 +1,613 @@
import { useLocation } from 'react-router-dom';
|
||||
import React, { useEffect, useMemo, useState } from 'react';
|
||||
import useLocalStorage from '../../hooks/useLocalStorage.jsx';
|
||||
import usePlaylistsStore from '../../store/playlists.jsx';
|
||||
import useSettingsStore from '../../store/settings.jsx';
|
||||
import {
|
||||
ActionIcon,
|
||||
Badge,
|
||||
Box,
|
||||
Card,
|
||||
Center,
|
||||
Flex,
|
||||
Group,
|
||||
Select,
|
||||
Stack,
|
||||
Text,
|
||||
Tooltip,
|
||||
} from '@mantine/core';
|
||||
import {
|
||||
Gauge,
|
||||
HardDriveDownload,
|
||||
HardDriveUpload,
|
||||
SquareX,
|
||||
Timer,
|
||||
Users,
|
||||
Video,
|
||||
} from 'lucide-react';
|
||||
import { toFriendlyDuration } from '../../utils/dateTimeUtils.js';
|
||||
import { CustomTable, useTable } from '../tables/CustomTable/index.jsx';
|
||||
import { TableHelper } from '../../helpers/index.jsx';
|
||||
import logo from '../../images/logo.png';
|
||||
import { formatBytes, formatSpeed } from '../../utils/networkUtils.js';
|
||||
import { showNotification } from '../../utils/notificationUtils.js';
|
||||
import {
|
||||
connectedAccessor,
|
||||
durationAccessor,
|
||||
getBufferingSpeedThreshold,
|
||||
getChannelStreams,
|
||||
getLogoUrl,
|
||||
getM3uAccountsMap,
|
||||
getMatchingStreamByUrl,
|
||||
getSelectedStream,
|
||||
getStartDate,
|
||||
getStreamOptions,
|
||||
getStreamsByIds,
|
||||
switchStream,
|
||||
} from '../../utils/cards/StreamConnectionCardUtils.js';
|
||||
|
||||
// Create a separate component for each channel card to properly handle the hook
|
||||
const StreamConnectionCard = ({
|
||||
channel,
|
||||
clients,
|
||||
stopClient,
|
||||
stopChannel,
|
||||
logos,
|
||||
channelsByUUID,
|
||||
}) => {
|
||||
const location = useLocation();
|
||||
const [availableStreams, setAvailableStreams] = useState([]);
|
||||
const [isLoadingStreams, setIsLoadingStreams] = useState(false);
|
||||
const [activeStreamId, setActiveStreamId] = useState(null);
|
||||
const [currentM3UProfile, setCurrentM3UProfile] = useState(null); // Add state for current M3U profile
|
||||
const [data, setData] = useState([]);
|
||||
const [previewedStream, setPreviewedStream] = useState(null);
|
||||
|
||||
// Get M3U account data from the playlists store
|
||||
const m3uAccounts = usePlaylistsStore((s) => s.playlists);
|
||||
// Get settings for speed threshold
|
||||
const settings = useSettingsStore((s) => s.settings);
|
||||
|
||||
// Get Date-format from localStorage
|
||||
const [dateFormatSetting] = useLocalStorage('date-format', 'mdy');
|
||||
const dateFormat = dateFormatSetting === 'mdy' ? 'MM/DD' : 'DD/MM';
|
||||
const [tableSize] = useLocalStorage('table-size', 'default');
|
||||
|
||||
// Create a map of M3U account IDs to names for quick lookup
|
||||
const m3uAccountsMap = useMemo(() => {
|
||||
return getM3uAccountsMap(m3uAccounts);
|
||||
}, [m3uAccounts]);
|
||||
|
||||
// Update M3U profile information when channel data changes
|
||||
useEffect(() => {
|
||||
// If the channel data includes M3U profile information, update our state
|
||||
if (channel.m3u_profile || channel.m3u_profile_name) {
|
||||
setCurrentM3UProfile({
|
||||
name:
|
||||
channel.m3u_profile?.name ||
|
||||
channel.m3u_profile_name ||
|
||||
'Default M3U',
|
||||
});
|
||||
}
|
||||
}, [channel.m3u_profile, channel.m3u_profile_name, channel.stream_id]);
|
||||
|
||||
// Fetch available streams for this channel
|
||||
useEffect(() => {
|
||||
const fetchStreams = async () => {
|
||||
setIsLoadingStreams(true);
|
||||
try {
|
||||
// Get channel ID from UUID
|
||||
const channelId = channelsByUUID[channel.channel_id];
|
||||
if (channelId) {
|
||||
const streamData = await getChannelStreams(channelId);
|
||||
|
||||
// Use streams in the order returned by the API without sorting
|
||||
setAvailableStreams(streamData);
|
||||
|
||||
// If we have a channel URL, try to find the matching stream
|
||||
if (channel.url && streamData.length > 0) {
|
||||
// Try to find matching stream based on URL
|
||||
const matchingStream = getMatchingStreamByUrl(
|
||||
streamData,
|
||||
channel.url
|
||||
);
|
||||
|
||||
if (matchingStream) {
|
||||
setActiveStreamId(matchingStream.id.toString());
|
||||
|
||||
// If the stream has M3U profile info, save it
|
||||
if (matchingStream.m3u_profile) {
|
||||
setCurrentM3UProfile(matchingStream.m3u_profile);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
} catch (error) {
|
||||
console.error('Error fetching streams:', error);
|
||||
} finally {
|
||||
setIsLoadingStreams(false);
|
||||
}
|
||||
};
|
||||
|
||||
fetchStreams();
|
||||
}, [channel.channel_id, channel.url, channelsByUUID]);
|
||||
|
||||
useEffect(() => {
|
||||
setData(
|
||||
clients
|
||||
.filter((client) => client.channel.channel_id === channel.channel_id)
|
||||
.map((client) => ({
|
||||
id: client.client_id,
|
||||
...client,
|
||||
}))
|
||||
);
|
||||
}, [clients, channel.channel_id]);
|
||||
|
||||
const renderHeaderCell = (header) => {
|
||||
switch (header.id) {
|
||||
default:
|
||||
return (
|
||||
<Group>
|
||||
<Text size="sm" name={header.id}>
|
||||
{header.column.columnDef.header}
|
||||
</Text>
|
||||
</Group>
|
||||
);
|
||||
}
|
||||
};
|
||||
|
||||
const renderBodyCell = ({ cell, row }) => {
|
||||
switch (cell.column.id) {
|
||||
case 'actions':
|
||||
return (
|
||||
<Box sx={{ justifyContent: 'right' }}>
|
||||
<Center>
|
||||
<Tooltip label="Disconnect client">
|
||||
<ActionIcon
|
||||
size="sm"
|
||||
variant="transparent"
|
||||
color="red.9"
|
||||
onClick={() =>
|
||||
stopClient(
|
||||
row.original.channel.uuid,
|
||||
row.original.client_id
|
||||
)
|
||||
}
|
||||
>
|
||||
<SquareX size="18" />
|
||||
</ActionIcon>
|
||||
</Tooltip>
|
||||
</Center>
|
||||
</Box>
|
||||
);
|
||||
}
|
||||
};
|
||||
|
||||
const checkStreamsAfterChange = (streamId) => {
|
||||
return async () => {
|
||||
try {
|
||||
const channelId = channelsByUUID[channel.channel_id];
|
||||
if (channelId) {
|
||||
const updatedStreamData = await getChannelStreams(channelId);
|
||||
console.log('Channel streams after switch:', updatedStreamData);
|
||||
|
||||
// Update current stream information with fresh data
|
||||
const updatedStream = getSelectedStream(updatedStreamData, streamId);
|
||||
if (updatedStream?.m3u_profile) {
|
||||
setCurrentM3UProfile(updatedStream.m3u_profile);
|
||||
}
|
||||
}
|
||||
} catch (error) {
|
||||
console.error('Error checking streams after switch:', error);
|
||||
}
|
||||
};
|
||||
};
|
||||
|
||||
// Handle stream switching
|
||||
const handleStreamChange = async (streamId) => {
|
||||
try {
|
||||
console.log('Switching to stream ID:', streamId);
|
||||
// Find the selected stream in availableStreams for debugging
|
||||
const selectedStream = getSelectedStream(availableStreams, streamId);
|
||||
console.log('Selected stream details:', selectedStream);
|
||||
|
||||
// Make sure we're passing the correct ID to the API
|
||||
const response = await switchStream(channel, streamId);
|
||||
console.log('Stream switch API response:', response);
|
||||
|
||||
// Update the local active stream ID immediately
|
||||
setActiveStreamId(streamId);
|
||||
|
||||
// Update M3U profile information if available in the response
|
||||
if (response?.m3u_profile) {
|
||||
setCurrentM3UProfile(response.m3u_profile);
|
||||
} else if (selectedStream && selectedStream.m3u_profile) {
|
||||
// Fallback to the profile from the selected stream
|
||||
setCurrentM3UProfile(selectedStream.m3u_profile);
|
||||
}
|
||||
|
||||
// Show detailed notification with stream name
|
||||
showNotification({
|
||||
title: 'Stream switching',
|
||||
message: `Switching to "${selectedStream?.name}" for ${channel.name}`,
|
||||
color: 'blue.5',
|
||||
});
|
||||
|
||||
// After a short delay, fetch streams again to confirm the switch
|
||||
setTimeout(checkStreamsAfterChange(streamId), 2000);
|
||||
} catch (error) {
|
||||
console.error('Stream switch error:', error);
|
||||
showNotification({
|
||||
title: 'Error switching stream',
|
||||
message: error.toString(),
|
||||
color: 'red.5',
|
||||
});
|
||||
}
|
||||
};
|
||||
|
||||
const clientsColumns = useMemo(
|
||||
() => [
|
||||
{
|
||||
id: 'expand',
|
||||
size: 20,
|
||||
},
|
||||
{
|
||||
header: 'IP Address',
|
||||
accessorKey: 'ip_address',
|
||||
},
|
||||
// Updated Connected column with tooltip
|
||||
{
|
||||
id: 'connected',
|
||||
header: 'Connected',
|
||||
accessorFn: connectedAccessor(dateFormat),
|
||||
cell: ({ cell }) => (
|
||||
<Tooltip
|
||||
label={
|
||||
cell.getValue() !== 'Unknown'
|
||||
? `Connected at ${cell.getValue()}`
|
||||
: 'Unknown connection time'
|
||||
}
|
||||
>
|
||||
<Text size="xs">{cell.getValue()}</Text>
|
||||
</Tooltip>
|
||||
),
|
||||
},
|
||||
// Update Duration column with tooltip showing exact seconds
|
||||
{
|
||||
id: 'duration',
|
||||
header: 'Duration',
|
||||
accessorFn: durationAccessor(),
|
||||
cell: ({ cell, row }) => {
|
||||
const exactDuration =
|
||||
row.original.connected_since || row.original.connection_duration;
|
||||
return (
|
||||
<Tooltip
|
||||
label={
|
||||
exactDuration
|
||||
? `${exactDuration.toFixed(1)} seconds`
|
||||
: 'Unknown duration'
|
||||
}
|
||||
>
|
||||
<Text size="xs">{cell.getValue()}</Text>
|
||||
</Tooltip>
|
||||
);
|
||||
},
|
||||
},
|
||||
{
|
||||
id: 'actions',
|
||||
header: 'Actions',
|
||||
size: tableSize == 'compact' ? 75 : 100,
|
||||
},
|
||||
],
|
||||
[]
|
||||
);
|
||||
|
||||
const channelClientsTable = useTable({
|
||||
...TableHelper.defaultProperties,
|
||||
columns: clientsColumns,
|
||||
data,
|
||||
allRowIds: data.map((client) => client.id),
|
||||
tableCellProps: () => ({
|
||||
padding: 4,
|
||||
borderColor: '#444',
|
||||
color: '#E0E0E0',
|
||||
fontSize: '0.85rem',
|
||||
}),
|
||||
headerCellRenderFns: {
|
||||
ip_address: renderHeaderCell,
|
||||
connected: renderHeaderCell,
|
||||
duration: renderHeaderCell,
|
||||
actions: renderHeaderCell,
|
||||
},
|
||||
bodyCellRenderFns: {
|
||||
actions: renderBodyCell,
|
||||
},
|
||||
getExpandedRowHeight: (row) => {
|
||||
return 20 + 28 * row.original.streams.length;
|
||||
},
|
||||
expandedRowRenderer: ({ row }) => {
|
||||
return (
|
||||
<Box p="xs">
|
||||
<Group spacing="xs" align="flex-start">
|
||||
<Text size="xs" fw={500} color="dimmed">
|
||||
User Agent:
|
||||
</Text>
|
||||
<Text size="xs">{row.original.user_agent || 'Unknown'}</Text>
|
||||
</Group>
|
||||
</Box>
|
||||
);
|
||||
},
|
||||
mantineExpandButtonProps: ({ row, table }) => ({
|
||||
size: 'xs',
|
||||
style: {
|
||||
transform: row.getIsExpanded() ? 'rotate(180deg)' : 'rotate(-90deg)',
|
||||
transition: 'transform 0.2s',
|
||||
},
|
||||
}),
|
||||
displayColumnDefOptions: {
|
||||
'mrt-row-expand': {
|
||||
size: 15,
|
||||
header: '',
|
||||
},
|
||||
'mrt-row-actions': {
|
||||
size: 74,
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
// Get logo URL from the logos object if available
|
||||
const logoUrl = getLogoUrl(channel.logo_id, logos, previewedStream);
|
||||
|
||||
useEffect(() => {
|
||||
let isMounted = true;
|
||||
// Only fetch if we have a stream_id and NO channel.name
|
||||
if (!channel.name && channel.stream_id) {
|
||||
getStreamsByIds(channel.stream_id).then((streams) => {
|
||||
if (isMounted && streams && streams.length > 0) {
|
||||
setPreviewedStream(streams[0]);
|
||||
}
|
||||
});
|
||||
}
|
||||
return () => {
|
||||
isMounted = false;
|
||||
};
|
||||
}, [channel.name, channel.stream_id]);
|
||||
|
||||
const channelName =
|
||||
channel.name || previewedStream?.name || 'Unnamed Channel';
|
||||
const uptime = channel.uptime || 0;
|
||||
const bitrates = channel.bitrates || [];
|
||||
const totalBytes = channel.total_bytes || 0;
|
||||
const clientCount = channel.client_count || 0;
|
||||
const avgBitrate = channel.avg_bitrate || '0 Kbps';
|
||||
const streamProfileName = channel.stream_profile?.name || 'Unknown Profile';
|
||||
|
||||
// Use currentM3UProfile if available, otherwise fall back to channel data
|
||||
const m3uProfileName =
|
||||
currentM3UProfile?.name ||
|
||||
channel.m3u_profile?.name ||
|
||||
channel.m3u_profile_name ||
|
||||
'Unknown M3U Profile';
|
||||
|
||||
// Create select options for available streams
|
||||
const streamOptions = getStreamOptions(availableStreams, m3uAccountsMap);
|
||||
|
||||
if (location.pathname !== '/stats') {
|
||||
return <></>;
|
||||
}
|
||||
|
||||
// Safety check - if channel doesn't have required data, don't render
|
||||
if (!channel || !channel.channel_id) {
|
||||
return null;
|
||||
}
|
||||
|
||||
return (
|
||||
<Card
|
||||
key={channel.channel_id}
|
||||
shadow="sm"
|
||||
padding="md"
|
||||
radius="md"
|
||||
withBorder
|
||||
style={{
|
||||
backgroundColor: '#27272A',
|
||||
}}
|
||||
color="#fff"
|
||||
maw={700}
|
||||
w={'100%'}
|
||||
>
|
||||
<Stack pos="relative">
|
||||
<Group justify="space-between">
|
||||
<Box
|
||||
style={{
|
||||
alignItems: 'center',
|
||||
justifyContent: 'center',
|
||||
}}
|
||||
w={100}
|
||||
h={50}
|
||||
display="flex"
|
||||
>
|
||||
<img
|
||||
src={logoUrl || logo}
|
||||
style={{
|
||||
maxWidth: '100%',
|
||||
maxHeight: '100%',
|
||||
objectFit: 'contain',
|
||||
}}
|
||||
alt="channel logo"
|
||||
/>
|
||||
</Box>
|
||||
|
||||
<Group>
|
||||
<Box>
|
||||
<Tooltip label={getStartDate(uptime)}>
|
||||
<Center>
|
||||
<Timer pr={5} />
|
||||
{toFriendlyDuration(uptime, 'seconds')}
|
||||
</Center>
|
||||
</Tooltip>
|
||||
</Box>
|
||||
<Center>
|
||||
<Tooltip label="Stop Channel">
|
||||
<ActionIcon
|
||||
variant="transparent"
|
||||
color="red.9"
|
||||
onClick={() => stopChannel(channel.channel_id)}
|
||||
>
|
||||
<SquareX size="24" />
|
||||
</ActionIcon>
|
||||
</Tooltip>
|
||||
</Center>
|
||||
</Group>
|
||||
</Group>
|
||||
|
||||
<Flex justify="space-between" align="center">
|
||||
<Group>
|
||||
<Text fw={500}>{channelName}</Text>
|
||||
</Group>
|
||||
|
||||
<Tooltip label="Active Stream Profile">
|
||||
<Group gap={5}>
|
||||
<Video size="18" />
|
||||
{streamProfileName}
|
||||
</Group>
|
||||
</Tooltip>
|
||||
</Flex>
|
||||
|
||||
{/* Display M3U profile information */}
|
||||
<Flex justify="flex-end" align="center" mt={-8}>
|
||||
<Group gap={5}>
|
||||
<HardDriveUpload size="18" />
|
||||
<Tooltip label="Current M3U Profile">
|
||||
<Text size="xs">{m3uProfileName}</Text>
|
||||
</Tooltip>
|
||||
</Group>
|
||||
</Flex>
|
||||
|
||||
{/* Add stream selection dropdown */}
|
||||
{availableStreams.length > 0 && (
|
||||
<Tooltip label="Switch to another stream source">
|
||||
<Select
|
||||
size="xs"
|
||||
label="Active Stream"
|
||||
placeholder={
|
||||
isLoadingStreams ? 'Loading streams...' : 'Select stream'
|
||||
}
|
||||
data={streamOptions}
|
||||
value={activeStreamId || channel.stream_id?.toString() || null}
|
||||
onChange={handleStreamChange}
|
||||
disabled={isLoadingStreams}
|
||||
mt={8}
|
||||
/>
|
||||
</Tooltip>
|
||||
)}
|
||||
|
||||
{/* Add stream information badges */}
|
||||
<Group gap="xs" mt="xs">
|
||||
{channel.resolution && (
|
||||
<Tooltip label="Video resolution">
|
||||
<Badge size="sm" variant="light" color="red">
|
||||
{channel.resolution}
|
||||
</Badge>
|
||||
</Tooltip>
|
||||
)}
|
||||
{channel.source_fps && (
|
||||
<Tooltip label="Source frames per second">
|
||||
<Badge size="sm" variant="light" color="orange">
|
||||
{channel.source_fps} FPS
|
||||
</Badge>
|
||||
</Tooltip>
|
||||
)}
|
||||
{channel.video_codec && (
|
||||
<Tooltip label="Video codec">
|
||||
<Badge size="sm" variant="light" color="blue">
|
||||
{channel.video_codec.toUpperCase()}
|
||||
</Badge>
|
||||
</Tooltip>
|
||||
)}
|
||||
{channel.audio_codec && (
|
||||
<Tooltip label="Audio codec">
|
||||
<Badge size="sm" variant="light" color="pink">
|
||||
{channel.audio_codec.toUpperCase()}
|
||||
</Badge>
|
||||
</Tooltip>
|
||||
)}
|
||||
{channel.audio_channels && (
|
||||
<Tooltip label="Audio channel configuration">
|
||||
<Badge size="sm" variant="light" color="pink">
|
||||
{channel.audio_channels}
|
||||
</Badge>
|
||||
</Tooltip>
|
||||
)}
|
||||
{channel.stream_type && (
|
||||
<Tooltip label="Stream type">
|
||||
<Badge size="sm" variant="light" color="cyan">
|
||||
{channel.stream_type.toUpperCase()}
|
||||
</Badge>
|
||||
</Tooltip>
|
||||
)}
|
||||
{channel.ffmpeg_speed && (
|
||||
<Tooltip
|
||||
label={`Current Speed: ${parseFloat(channel.ffmpeg_speed).toFixed(2)}x`}
|
||||
>
|
||||
<Badge
|
||||
size="sm"
|
||||
variant="light"
|
||||
color={
|
||||
parseFloat(channel.ffmpeg_speed) >=
|
||||
getBufferingSpeedThreshold(settings['proxy_settings'])
|
||||
? 'green'
|
||||
: 'red'
|
||||
}
|
||||
>
|
||||
{parseFloat(channel.ffmpeg_speed).toFixed(2)}x
|
||||
</Badge>
|
||||
</Tooltip>
|
||||
)}
|
||||
</Group>
|
||||
|
||||
<Group justify="space-between">
|
||||
<Group gap={4}>
|
||||
<Tooltip
|
||||
label={`Current bitrate: ${formatSpeed(bitrates.at(-1) || 0)}`}
|
||||
>
|
||||
<Group gap={4} style={{ cursor: 'help' }}>
|
||||
<Gauge pr={5} size="22" />
|
||||
<Text size="sm">{formatSpeed(bitrates.at(-1) || 0)}</Text>
|
||||
</Group>
|
||||
</Tooltip>
|
||||
</Group>
|
||||
|
||||
<Tooltip label={`Average bitrate: ${avgBitrate}`}>
|
||||
<Text size="sm" style={{ cursor: 'help' }}>
|
||||
Avg: {avgBitrate}
|
||||
</Text>
|
||||
</Tooltip>
|
||||
|
||||
<Group gap={4}>
|
||||
<Tooltip label={`Total transferred: ${formatBytes(totalBytes)}`}>
|
||||
<Group gap={4} style={{ cursor: 'help' }}>
|
||||
<HardDriveDownload size="18" />
|
||||
<Text size="sm">{formatBytes(totalBytes)}</Text>
|
||||
</Group>
|
||||
</Tooltip>
|
||||
</Group>
|
||||
|
||||
<Group gap={5}>
|
||||
<Tooltip
|
||||
label={`${clientCount} active client${clientCount !== 1 ? 's' : ''}`}
|
||||
>
|
||||
<Group gap={4} style={{ cursor: 'help' }}>
|
||||
<Users size="18" />
|
||||
<Text size="sm">{clientCount}</Text>
|
||||
</Group>
|
||||
</Tooltip>
|
||||
</Group>
|
||||
</Group>
|
||||
|
||||
<CustomTable table={channelClientsTable} />
|
||||
</Stack>
|
||||
</Card>
|
||||
);
|
||||
};
|
||||
|
||||
export default StreamConnectionCard;
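handleStreamChange above delegates the actual request to switchStream from StreamConnectionCardUtils.js, which is not shown in this diff. As a hedged sketch only, such a helper might POST the chosen stream id to a channel endpoint; the URL and payload below are assumptions, not the project's actual API.

// Illustrative only: the real implementation lives in
// frontend/src/utils/cards/StreamConnectionCardUtils.js and may differ.
export async function switchStreamSketch(channel, streamId) {
  const response = await fetch(`/api/channels/${channel.channel_id}/switch-stream/`, {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({ stream_id: Number(streamId) }), // assumed payload shape
  });
  if (!response.ok) {
    throw new Error(`Stream switch failed with status ${response.status}`);
  }
  return response.json(); // the card expects m3u_profile in the response when available
}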
frontend/src/components/cards/VODCard.jsx (Normal file, 143 lines)
@@ -0,0 +1,143 @@
import {
  ActionIcon,
  Badge,
  Box,
  Card,
  CardSection,
  Group,
  Image,
  Stack,
  Text,
} from '@mantine/core';
import { Calendar, Clock, Play, Star } from 'lucide-react';
import React from 'react';
import {
  formatDuration,
  getSeasonLabel,
} from '../../utils/cards/VODCardUtils.js';

const VODCard = ({ vod, onClick }) => {
  const isEpisode = vod.type === 'episode';

  const getDisplayTitle = () => {
    if (isEpisode && vod.series) {
      return (
        <Stack spacing={4}>
          <Text size="sm" c="dimmed">
            {vod.series.name}
          </Text>
          <Text weight={500}>
            {getSeasonLabel(vod)} - {vod.name}
          </Text>
        </Stack>
      );
    }
    return <Text weight={500}>{vod.name}</Text>;
  };

  const handleCardClick = async () => {
    // Just pass the basic vod info to the parent handler
    onClick(vod);
  };

  return (
    <Card
      shadow="sm"
      padding="md"
      radius="md"
      withBorder
      style={{ cursor: 'pointer', backgroundColor: '#27272A' }}
      onClick={handleCardClick}
    >
      <CardSection>
        <Box pos="relative" h={300}>
          {vod.logo?.url ? (
            <Image
              src={vod.logo.url}
              height={300}
              alt={vod.name}
              fit="contain"
            />
          ) : (
            <Box
              style={{
                backgroundColor: '#404040',
                alignItems: 'center',
                justifyContent: 'center',
              }}
              h={300}
              display="flex"
            >
              <Play size={48} color="#666" />
            </Box>
          )}

          <ActionIcon
            style={{
              backgroundColor: 'rgba(0,0,0,0.7)',
            }}
            pos="absolute"
            top={8}
            right={8}
            onClick={(e) => {
              e.stopPropagation();
              onClick(vod);
            }}
          >
            <Play size={16} color="white" />
          </ActionIcon>

          <Badge
            pos="absolute"
            bottom={8}
            left={8}
            color={isEpisode ? 'blue' : 'green'}
          >
            {isEpisode ? 'Episode' : 'Movie'}
          </Badge>
        </Box>
      </CardSection>

      <Stack spacing={8} mt="md">
        {getDisplayTitle()}

        <Group spacing={16}>
          {vod.year && (
            <Group spacing={4}>
              <Calendar size={14} color="#666" />
              <Text size="xs" c="dimmed">
                {vod.year}
              </Text>
            </Group>
          )}

          {vod.duration && (
            <Group spacing={4}>
              <Clock size={14} color="#666" />
              <Text size="xs" c="dimmed">
                {formatDuration(vod.duration_secs)}
              </Text>
            </Group>
          )}

          {vod.rating && (
            <Group spacing={4}>
              <Star size={14} color="#666" />
              <Text size="xs" c="dimmed">
                {vod.rating}
              </Text>
            </Group>
          )}
        </Group>

        {vod.genre && (
          <Text size="xs" c="dimmed" lineClamp={1}>
            {vod.genre}
          </Text>
        )}
      </Stack>
    </Card>
  );
};

export default VODCard;
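The runtime row relies on formatDuration from VODCardUtils.js, which this diff does not include. The sketch below is only an assumption about the kind of seconds-to-label formatting the card expects, not a copy of the real utility.

// Sketch of a duration formatter compatible with how the cards display runtime.
export function formatDurationSketch(durationSecs) {
  if (!durationSecs || durationSecs <= 0) return 'Unknown';
  const hours = Math.floor(durationSecs / 3600);
  const minutes = Math.floor((durationSecs % 3600) / 60);
  return hours > 0 ? `${hours}h ${minutes}m` : `${minutes}m`;
}

// formatDurationSketch(5025) === '1h 23m'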
frontend/src/components/cards/VodConnectionCard.jsx (Normal file, 422 lines)
@@ -0,0 +1,422 @@
// Format duration for content length
|
||||
import useLocalStorage from '../../hooks/useLocalStorage.jsx';
|
||||
import React, { useCallback, useEffect, useState } from 'react';
|
||||
import logo from '../../images/logo.png';
|
||||
import { ActionIcon, Badge, Box, Card, Center, Flex, Group, Progress, Stack, Text, Tooltip } from '@mantine/core';
|
||||
import { convertToSec, fromNow, toFriendlyDuration } from '../../utils/dateTimeUtils.js';
|
||||
import { ChevronDown, HardDriveUpload, SquareX, Timer, Video } from 'lucide-react';
|
||||
import {
|
||||
calculateConnectionDuration,
|
||||
calculateConnectionStartTime,
|
||||
calculateProgress,
|
||||
formatDuration,
|
||||
formatTime,
|
||||
getEpisodeDisplayTitle,
|
||||
getEpisodeSubtitle,
|
||||
getMovieDisplayTitle,
|
||||
getMovieSubtitle,
|
||||
} from '../../utils/cards/VodConnectionCardUtils.js';
|
||||
|
||||
const ClientDetails = ({ connection, connectionStartTime }) => {
|
||||
return (
|
||||
<Stack
|
||||
gap="xs"
|
||||
style={{
|
||||
backgroundColor: 'rgba(255, 255, 255, 0.02)',
|
||||
}}
|
||||
p={12}
|
||||
bdrs={6}
|
||||
bd={'1px solid rgba(255, 255, 255, 0.08)'}
|
||||
>
|
||||
{connection.user_agent &&
|
||||
connection.user_agent !== 'Unknown' && (
|
||||
<Group gap={8} align="flex-start">
|
||||
<Text size="xs" fw={500} c="dimmed" miw={80}>
|
||||
User Agent:
|
||||
</Text>
|
||||
<Text size="xs" ff={'monospace'} flex={1}>
|
||||
{connection.user_agent.length > 100
|
||||
? `${connection.user_agent.substring(0, 100)}...`
|
||||
: connection.user_agent}
|
||||
</Text>
|
||||
</Group>
|
||||
)}
|
||||
|
||||
<Group gap={8}>
|
||||
<Text size="xs" fw={500} c="dimmed" miw={80}>
|
||||
Client ID:
|
||||
</Text>
|
||||
<Text size="xs" ff={'monospace'}>
|
||||
{connection.client_id || 'Unknown'}
|
||||
</Text>
|
||||
</Group>
|
||||
|
||||
{connection.connected_at && (
|
||||
<Group gap={8}>
|
||||
<Text size="xs" fw={500} c="dimmed" miw={80}>
|
||||
Connected:
|
||||
</Text>
|
||||
<Text size="xs">{connectionStartTime}</Text>
|
||||
</Group>
|
||||
)}
|
||||
|
||||
{connection.duration && connection.duration > 0 && (
|
||||
<Group gap={8}>
|
||||
<Text size="xs" fw={500} c="dimmed" miw={80}>
|
||||
Watch Duration:
|
||||
</Text>
|
||||
<Text size="xs">
|
||||
{toFriendlyDuration(connection.duration, 'seconds')}
|
||||
</Text>
|
||||
</Group>
|
||||
)}
|
||||
|
||||
{/* Seek/Position Information */}
|
||||
{(connection.last_seek_percentage > 0 ||
|
||||
connection.last_seek_byte > 0) && (
|
||||
<>
|
||||
<Group gap={8}>
|
||||
<Text size="xs" fw={500} c="dimmed" miw={80}>
|
||||
Last Seek:
|
||||
</Text>
|
||||
<Text size="xs">
|
||||
{connection.last_seek_percentage?.toFixed(1)}%
|
||||
{connection.total_content_size > 0 && (
|
||||
<span style={{ color: 'var(--mantine-color-dimmed)' }}>
|
||||
{' '}
|
||||
({Math.round(connection.last_seek_byte / (1024 * 1024))}
|
||||
MB /{' '}
|
||||
{Math.round(
|
||||
connection.total_content_size / (1024 * 1024)
|
||||
)}
|
||||
MB)
|
||||
</span>
|
||||
)}
|
||||
</Text>
|
||||
</Group>
|
||||
|
||||
{Number(connection.last_seek_timestamp) > 0 && (
|
||||
<Group gap={8}>
|
||||
<Text size="xs" fw={500} c="dimmed" miw={80}>
|
||||
Seek Time:
|
||||
</Text>
|
||||
<Text size="xs">
|
||||
{fromNow(convertToSec(Number(connection.last_seek_timestamp)))}
|
||||
</Text>
|
||||
</Group>
|
||||
)}
|
||||
</>
|
||||
)}
|
||||
|
||||
{connection.bytes_sent > 0 && (
|
||||
<Group gap={8}>
|
||||
<Text size="xs" fw={500} c="dimmed" miw={80}>
|
||||
Data Sent:
|
||||
</Text>
|
||||
<Text size="xs">
|
||||
{(connection.bytes_sent / (1024 * 1024)).toFixed(1)} MB
|
||||
</Text>
|
||||
</Group>
|
||||
)}
|
||||
</Stack>
|
||||
);
|
||||
}
|
||||
|
||||
// Create a VOD Card component similar to ChannelCard
|
||||
const VodConnectionCard = ({ vodContent, stopVODClient }) => {
|
||||
const [dateFormatSetting] = useLocalStorage('date-format', 'mdy');
|
||||
const dateFormat = dateFormatSetting === 'mdy' ? 'MM/DD' : 'DD/MM';
|
||||
const [isClientExpanded, setIsClientExpanded] = useState(false);
|
||||
const [, setUpdateTrigger] = useState(0); // Force re-renders for progress updates
|
||||
|
||||
// Get metadata from the VOD content
|
||||
const metadata = vodContent.content_metadata || {};
|
||||
const contentType = vodContent.content_type;
|
||||
const isMovie = contentType === 'movie';
|
||||
const isEpisode = contentType === 'episode';
|
||||
|
||||
// Set up timer to update progress every second
|
||||
useEffect(() => {
|
||||
const interval = setInterval(() => {
|
||||
setUpdateTrigger((prev) => prev + 1);
|
||||
}, 1000);
|
||||
|
||||
return () => clearInterval(interval);
|
||||
}, []);
|
||||
|
||||
// Get the individual connection (since we now separate cards per connection)
|
||||
const connection =
|
||||
vodContent.individual_connection ||
|
||||
(vodContent.connections && vodContent.connections[0]);
|
||||
|
||||
// Get poster/logo URL
|
||||
const posterUrl = metadata.logo_url || logo;
|
||||
|
||||
// Get display title
|
||||
const getDisplayTitle = () => {
|
||||
if (isMovie) {
|
||||
return getMovieDisplayTitle(vodContent);
|
||||
} else if (isEpisode) {
|
||||
return getEpisodeDisplayTitle(metadata);
|
||||
}
|
||||
return vodContent.content_name;
|
||||
};
|
||||
|
||||
// Get subtitle info
|
||||
const getSubtitle = () => {
|
||||
if (isMovie) {
|
||||
return getMovieSubtitle(metadata);
|
||||
} else if (isEpisode) {
|
||||
return getEpisodeSubtitle(metadata);
|
||||
}
|
||||
return [];
|
||||
};
|
||||
|
||||
// Render subtitle
|
||||
const renderSubtitle = () => {
|
||||
const subtitleParts = getSubtitle();
|
||||
if (subtitleParts.length === 0) return null;
|
||||
|
||||
return (
|
||||
<Text size="sm" c="dimmed">
|
||||
{subtitleParts.join(' • ')}
|
||||
</Text>
|
||||
);
|
||||
};
|
||||
|
||||
// Calculate progress percentage and time
|
||||
const getProgressInfo = useCallback(() => {
|
||||
return calculateProgress(connection, metadata.duration_secs);
|
||||
}, [connection, metadata.duration_secs]);
|
||||
|
||||
// Calculate duration for connection
|
||||
const getConnectionDuration = useCallback((connection) => {
|
||||
return calculateConnectionDuration(connection);
|
||||
}, []);
|
||||
|
||||
// Get connection start time for tooltip
|
||||
const getConnectionStartTime = useCallback(
|
||||
(connection) => {
|
||||
return calculateConnectionStartTime(connection, dateFormat);
|
||||
},
|
||||
[dateFormat]
|
||||
);
|
||||
|
||||
return (
|
||||
<Card
|
||||
shadow="sm"
|
||||
padding="md"
|
||||
radius="md"
|
||||
withBorder
|
||||
style={{
|
||||
backgroundColor: '#27272A',
|
||||
}}
|
||||
color='#FFF'
|
||||
maw={700}
|
||||
w={'100%'}
|
||||
>
|
||||
<Stack pos='relative' >
|
||||
{/* Header with poster and basic info */}
|
||||
<Group justify="space-between">
|
||||
<Box h={100} display='flex'
|
||||
style={{
|
||||
alignItems: 'center',
|
||||
justifyContent: 'center',
|
||||
}}
|
||||
>
|
||||
<img
|
||||
src={posterUrl}
|
||||
style={{
|
||||
maxWidth: '100%',
|
||||
maxHeight: '100%',
|
||||
objectFit: 'contain',
|
||||
}}
|
||||
alt="content poster"
|
||||
/>
|
||||
</Box>
|
||||
|
||||
<Group>
|
||||
{connection && (
|
||||
<Tooltip
|
||||
label={`Connected at ${getConnectionStartTime(connection)}`}
|
||||
>
|
||||
<Center>
|
||||
<Timer pr={5} />
|
||||
{getConnectionDuration(connection)}
|
||||
</Center>
|
||||
</Tooltip>
|
||||
)}
|
||||
{connection && stopVODClient && (
|
||||
<Center>
|
||||
<Tooltip label="Stop VOD Connection">
|
||||
<ActionIcon
|
||||
variant="transparent"
|
||||
color="red.9"
|
||||
onClick={() => stopVODClient(connection.client_id)}
|
||||
>
|
||||
<SquareX size="24" />
|
||||
</ActionIcon>
|
||||
</Tooltip>
|
||||
</Center>
|
||||
)}
|
||||
</Group>
|
||||
</Group>
|
||||
|
||||
{/* Title and type */}
|
||||
<Flex justify="space-between" align="center">
|
||||
<Group>
|
||||
<Text fw={500}>{getDisplayTitle()}</Text>
|
||||
</Group>
|
||||
|
||||
<Tooltip label="Content Type">
|
||||
<Group gap={5}>
|
||||
<Video size="18" />
|
||||
{isMovie ? 'Movie' : 'TV Episode'}
|
||||
</Group>
|
||||
</Tooltip>
|
||||
</Flex>
|
||||
|
||||
{/* Display M3U profile information - matching channel card style */}
|
||||
{connection &&
|
||||
connection.m3u_profile &&
|
||||
(connection.m3u_profile.profile_name ||
|
||||
connection.m3u_profile.account_name) && (
|
||||
<Flex justify="flex-end" align="flex-start" mt={-8}>
|
||||
<Group gap={5} align="flex-start">
|
||||
<HardDriveUpload size="18" mt={2} />
|
||||
<Stack gap={0}>
|
||||
<Tooltip label="M3U Account">
|
||||
<Text size="xs" fw={500}>
|
||||
{connection.m3u_profile.account_name || 'Unknown Account'}
|
||||
</Text>
|
||||
</Tooltip>
|
||||
<Tooltip label="M3U Profile">
|
||||
<Text size="xs" c="dimmed">
|
||||
{connection.m3u_profile.profile_name || 'Default Profile'}
|
||||
</Text>
|
||||
</Tooltip>
|
||||
</Stack>
|
||||
</Group>
|
||||
</Flex>
|
||||
)}
|
||||
|
||||
{/* Subtitle/episode info */}
|
||||
{getSubtitle().length > 0 && (
|
||||
<Flex justify="flex-start" align="center" mt={-12}>
|
||||
{renderSubtitle()}
|
||||
</Flex>
|
||||
)}
|
||||
|
||||
{/* Content information badges - streamlined to avoid duplication */}
|
||||
<Group gap="xs" mt={-4}>
|
||||
{metadata.year && (
|
||||
<Tooltip label="Release Year">
|
||||
<Badge size="sm" variant="light" color="orange">
|
||||
{metadata.year}
|
||||
</Badge>
|
||||
</Tooltip>
|
||||
)}
|
||||
|
||||
{metadata.duration_secs && (
|
||||
<Tooltip label="Content Duration">
|
||||
<Badge size="sm" variant="light" color="blue">
|
||||
{formatDuration(metadata.duration_secs)}
|
||||
</Badge>
|
||||
</Tooltip>
|
||||
)}
|
||||
|
||||
{metadata.rating && (
|
||||
<Tooltip label="Critic Rating (out of 10)">
|
||||
<Badge size="sm" variant="light" color="yellow">
|
||||
{parseFloat(metadata.rating).toFixed(1)}/10
|
||||
</Badge>
|
||||
</Tooltip>
|
||||
)}
|
||||
</Group>
|
||||
|
||||
{/* Progress bar - show current position in content */}
|
||||
{connection &&
|
||||
metadata.duration_secs &&
|
||||
(() => {
|
||||
const { totalTime, currentTime, percentage} = getProgressInfo();
|
||||
return totalTime > 0 ? (
|
||||
<Stack gap="xs" mt="sm">
|
||||
<Group justify="space-between" align="center">
|
||||
<Text size="xs" fw={500} c="dimmed">
|
||||
Progress
|
||||
</Text>
|
||||
<Text size="xs" c="dimmed">
|
||||
{formatTime(currentTime)} /{' '}
|
||||
{formatTime(totalTime)}
|
||||
</Text>
|
||||
</Group>
|
||||
<Progress
|
||||
value={percentage}
|
||||
size="sm"
|
||||
color="blue"
|
||||
style={{
|
||||
backgroundColor: 'rgba(255, 255, 255, 0.1)',
|
||||
}}
|
||||
/>
|
||||
<Text size="xs" c="dimmed" ta="center">
|
||||
{percentage.toFixed(1)}% watched
|
||||
</Text>
|
||||
</Stack>
|
||||
) : null;
|
||||
})()}
|
||||
|
||||
{/* Client information section - collapsible like channel cards */}
|
||||
{connection && (
|
||||
<Stack gap="xs" mt="xs">
|
||||
{/* Client summary header - always visible */}
|
||||
<Group
|
||||
justify="space-between"
|
||||
align="center"
|
||||
style={{
|
||||
cursor: 'pointer',
|
||||
backgroundColor: 'rgba(255, 255, 255, 0.05)',
|
||||
}}
|
||||
p={'8px 12px'}
|
||||
bdrs={6}
|
||||
bd={'1px solid rgba(255, 255, 255, 0.1)'}
|
||||
onClick={() => setIsClientExpanded(!isClientExpanded)}
|
||||
>
|
||||
<Group gap={8}>
|
||||
<Text size="sm" fw={500} color="dimmed">
|
||||
Client:
|
||||
</Text>
|
||||
<Text size="sm" ff={'monospace'}>
|
||||
{connection.client_ip || 'Unknown IP'}
|
||||
</Text>
|
||||
</Group>
|
||||
|
||||
<Group gap={8}>
|
||||
<Text size="xs" color="dimmed">
|
||||
{isClientExpanded ? 'Hide Details' : 'Show Details'}
|
||||
</Text>
|
||||
<ChevronDown
|
||||
size={16}
|
||||
style={{
|
||||
transform: isClientExpanded
|
||||
? 'rotate(0deg)'
|
||||
: 'rotate(180deg)',
|
||||
transition: 'transform 0.2s',
|
||||
}}
|
||||
/>
|
||||
</Group>
|
||||
</Group>
|
||||
|
||||
{/* Expanded client details */}
|
||||
{isClientExpanded && (
|
||||
<ClientDetails
|
||||
connection={connection}
|
||||
connectionStartTime={getConnectionStartTime(connection)} />
|
||||
)}
|
||||
</Stack>
|
||||
)}
|
||||
</Stack>
|
||||
</Card>
|
||||
);
|
||||
};
|
||||
|
||||
export default VodConnectionCard;
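The progress section above calls calculateProgress(connection, metadata.duration_secs) once per second; the utility itself lives in VodConnectionCardUtils.js and is not part of this diff. The sketch below assumes the playback position is derived from last_seek_percentage and the content duration, which matches the fields the card reads but is not confirmed by the source.

// Hedged sketch of the progress maths the card displays; field semantics are assumptions.
export function calculateProgressSketch(connection, durationSecs) {
  const totalTime = Number(durationSecs) || 0;
  const percentage = Math.min(Math.max(connection?.last_seek_percentage ?? 0, 0), 100);
  const currentTime = (percentage / 100) * totalTime;
  return { totalTime, currentTime, percentage };
}

// Example: 42% into a 2-hour movie -> { totalTime: 7200, currentTime: 3024, percentage: 42 }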
Some files were not shown because too many files have changed in this diff.