Mirror of https://github.com/Dispatcharr/Dispatcharr.git (synced 2026-01-23 10:45:27 +00:00)

Compare commits (1497 commits)
The compare view lists 1,497 commits, identified in this capture only by abbreviated SHA1 (the author, date, and message cells were empty), running from 8521df94ad at the top of the listing down to 3054cf2ae9 at the bottom.
373 changed files with 84378 additions and 9522 deletions
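The same commit range can be reproduced from a local clone; a minimal sketch, assuming the listing above is ordered newest-first, with 8521df94ad and 3054cf2ae9 as its newest and oldest entries (the exact compare base is not captured here):

```bash
# List the commits covered by the compare view (approximate: the exact
# base commit of the comparison is not shown in this capture).
git clone https://github.com/Dispatcharr/Dispatcharr.git
cd Dispatcharr
git log --oneline 3054cf2ae9~1..8521df94ad

# Cross-check the change summary against the
# "373 changed files with 84378 additions and 9522 deletions" figure.
git diff --stat 3054cf2ae9~1 8521df94ad
```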
(first changed file; its header row was not captured)

@@ -11,6 +11,10 @@
**/.toolstarget
**/.vs
**/.vscode
**/.history
**/media
**/models
**/static
**/*.*proj.user
**/*.dbmdl
**/*.jfm

@@ -26,3 +30,5 @@
**/values.dev.yaml
LICENSE
README.md
data/
docker/data/

64  .github/ISSUE_TEMPLATE/bug_report.yml  vendored  Normal file

@@ -0,0 +1,64 @@
name: Bug Report
description: I have an issue with Dispatcharr
title: "[Bug]: "
labels: ["Triage"]
type: "Bug"
projects: []
assignees: []
body:
  - type: markdown
    attributes:
      value: |
        Please make sure you search for similar issues before submitting. Thank you for your bug report!
  - type: textarea
    id: describe-the-bug
    attributes:
      label: Describe the bug
      description: Make sure to attach screenshots if possible!
      placeholder: Tell us what you see!
      value: "A clear and concise description of what the bug is. What did you expect to happen?"
    validations:
      required: true
  - type: textarea
    id: reproduce
    attributes:
      label: How can we recreate this bug?
      description: Be detailed!
      placeholder: Tell us what you see!
      value: "1. Go to '...' 2. Click on '....' 3. Scroll down to '....' 4. See error"
    validations:
      required: true
  - type: input
    id: dispatcharr-version
    attributes:
      label: Dispatcharr Version
      description: What version of Dispatcharr are you running?
      placeholder: Located bottom left of main screen
    validations:
      required: true
  - type: input
    id: docker-version
    attributes:
      label: Docker Version
      description: What version of Docker are you running?
      placeholder: docker --version
    validations:
      required: true
  - type: textarea
    id: docker-compose
    attributes:
      label: What's in your Docker Compose file?
      description: Please share your docker-compose.yml file
      placeholder: Tell us what you see!
      value: "If not using Docker Compose just put not using."
    validations:
      required: true
  - type: textarea
    id: client-info
    attributes:
      label: Client Information
      description: What are you using to view the streams from Dispatcharr?
      placeholder: Tell us what you see!
      value: "Device, App, Versions for both, etc..."
    validations:
      required: true

1  .github/ISSUE_TEMPLATE/config.yml  vendored  Normal file

@@ -0,0 +1 @@
blank_issues_enabled: false

39  .github/ISSUE_TEMPLATE/feature_request.yml  vendored  Normal file

@@ -0,0 +1,39 @@
name: Feature request
description: I want to suggest a new feature for Dispatcharr
title: "[Feature]: "
labels: ["Triage"]
type: "Feature"
projects: []
assignees: []
body:
  - type: markdown
    attributes:
      value: |
        Thank you for helping to make Dispatcharr better!
  - type: textarea
    id: describe-problem
    attributes:
      label: Is your feature request related to a problem?
      description: Make sure to attach screenshots if possible!
      placeholder: Tell us what you see!
      value: "A clear and concise description of what the problem is. Ex. I'm always frustrated when [...]"
    validations:
      required: true
  - type: textarea
    id: describe-solution
    attributes:
      label: Describe the solution you'd like
      description: A clear and concise description of what you want to happen.
      placeholder: Tell us what you see!
      value: "Describe here."
    validations:
      required: true
  - type: textarea
    id: extras
    attributes:
      label: Additional context
      description: Anything else you want to add?
      placeholder: Tell us what you see!
      value: "Nothing Extra"
    validations:
      required: true
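Both templates use GitHub's issue-forms schema, and config.yml disables blank issues so every new issue goes through one of them. A quick local sanity check is plain YAML linting; a sketch, assuming yamllint is installed (GitHub itself only validates the form schema once the files land on the default branch):

```bash
# Catch indentation and syntax mistakes in the issue forms before pushing.
yamllint .github/ISSUE_TEMPLATE/bug_report.yml \
         .github/ISSUE_TEMPLATE/feature_request.yml \
         .github/ISSUE_TEMPLATE/config.yml
```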

250  .github/workflows/base-image.yml  vendored  Normal file

@@ -0,0 +1,250 @@
name: Base Image Build

on:
  push:
    branches: [main, dev]
    paths:
      - 'docker/DispatcharrBase'
      - '.github/workflows/base-image.yml'
      - 'requirements.txt'
  pull_request:
    branches: [main, dev]
    paths:
      - 'docker/DispatcharrBase'
      - '.github/workflows/base-image.yml'
      - 'requirements.txt'
  workflow_dispatch: # Allow manual triggering

permissions:
  contents: write # For managing releases and pushing tags
  packages: write # For publishing to GitHub Container Registry

jobs:
  prepare:
    runs-on: ubuntu-24.04
    outputs:
      repo_owner: ${{ steps.meta.outputs.repo_owner }}
      repo_name: ${{ steps.meta.outputs.repo_name }}
      branch_tag: ${{ steps.meta.outputs.branch_tag }}
      timestamp: ${{ steps.timestamp.outputs.timestamp }}
    steps:
      - uses: actions/checkout@v3
        with:
          fetch-depth: 0
          token: ${{ secrets.GITHUB_TOKEN }}

      - name: Generate timestamp for build
        id: timestamp
        run: |
          TIMESTAMP=$(date -u +'%Y%m%d%H%M%S')
          echo "timestamp=${TIMESTAMP}" >> $GITHUB_OUTPUT

      - name: Set repository and image metadata
        id: meta
        run: |
          # Get lowercase repository owner
          REPO_OWNER=$(echo "${{ github.repository_owner }}" | tr '[:upper:]' '[:lower:]')
          echo "repo_owner=${REPO_OWNER}" >> $GITHUB_OUTPUT

          # Get repository name
          REPO_NAME=$(echo "${{ github.repository }}" | cut -d '/' -f 2 | tr '[:upper:]' '[:lower:]')
          echo "repo_name=${REPO_NAME}" >> $GITHUB_OUTPUT

          # Determine branch name
          if [[ "${{ github.ref }}" == "refs/heads/main" ]]; then
            echo "branch_tag=base" >> $GITHUB_OUTPUT
          elif [[ "${{ github.ref }}" == "refs/heads/dev" ]]; then
            echo "branch_tag=base-dev" >> $GITHUB_OUTPUT
          else
            # For other branches, use the branch name
            BRANCH=$(echo "${{ github.ref }}" | sed 's/refs\/heads\///' | sed 's/[^a-zA-Z0-9]/-/g')
            echo "branch_tag=base-${BRANCH}" >> $GITHUB_OUTPUT
          fi

  docker:
    needs: [prepare]
    strategy:
      fail-fast: false
      matrix:
        platform: [amd64, arm64]
        include:
          - platform: amd64
            runner: ubuntu-24.04
          - platform: arm64
            runner: ubuntu-24.04-arm
    runs-on: ${{ matrix.runner }}
    steps:
      - uses: actions/checkout@v3
        with:
          fetch-depth: 0
          token: ${{ secrets.GITHUB_TOKEN }}

      - name: Configure Git
        run: |
          git config user.name "GitHub Actions"
          git config user.email "actions@github.com"

      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v2

      - name: Login to GitHub Container Registry
        uses: docker/login-action@v2
        with:
          registry: ghcr.io
          username: ${{ github.actor }}
          password: ${{ secrets.GITHUB_TOKEN }}

      - name: Login to Docker Hub
        uses: docker/login-action@v2
        with:
          registry: docker.io
          username: ${{ secrets.DOCKERHUB_USERNAME }}
          password: ${{ secrets.DOCKERHUB_TOKEN }}

      - name: Extract metadata for Docker
        id: meta
        uses: docker/metadata-action@v5
        with:
          images: |
            ghcr.io/${{ needs.prepare.outputs.repo_owner }}/${{ needs.prepare.outputs.repo_name }}
            docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${{ needs.prepare.outputs.repo_name }}
          labels: |
            org.opencontainers.image.title=${{ needs.prepare.outputs.repo_name }}
            org.opencontainers.image.description=Your ultimate IPTV & stream Management companion.
            org.opencontainers.image.url=https://github.com/${{ github.repository }}
            org.opencontainers.image.source=https://github.com/${{ github.repository }}
            org.opencontainers.image.version=${{ needs.prepare.outputs.branch_tag }}-${{ needs.prepare.outputs.timestamp }}
            org.opencontainers.image.created=${{ needs.prepare.outputs.timestamp }}
            org.opencontainers.image.revision=${{ github.sha }}
            org.opencontainers.image.licenses=See repository
            org.opencontainers.image.documentation=https://dispatcharr.github.io/Dispatcharr-Docs/
            org.opencontainers.image.vendor=${{ needs.prepare.outputs.repo_owner }}
            org.opencontainers.image.authors=${{ github.actor }}
            maintainer=${{ github.actor }}
            build_version=DispatcharrBase version: ${{ needs.prepare.outputs.branch_tag }}-${{ needs.prepare.outputs.timestamp }}

      - name: Build and push Docker base image
        uses: docker/build-push-action@v4
        with:
          context: .
          file: ./docker/DispatcharrBase
          push: ${{ github.event_name != 'pull_request' }}
          platforms: linux/${{ matrix.platform }}
          tags: |
            ghcr.io/${{ needs.prepare.outputs.repo_owner }}/${{ needs.prepare.outputs.repo_name }}:${{ needs.prepare.outputs.branch_tag }}-${{ matrix.platform }}
            ghcr.io/${{ needs.prepare.outputs.repo_owner }}/${{ needs.prepare.outputs.repo_name }}:${{ needs.prepare.outputs.branch_tag }}-${{ needs.prepare.outputs.timestamp }}-${{ matrix.platform }}
            docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${{ needs.prepare.outputs.repo_name }}:${{ needs.prepare.outputs.branch_tag }}-${{ matrix.platform }}
            docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${{ needs.prepare.outputs.repo_name }}:${{ needs.prepare.outputs.branch_tag }}-${{ needs.prepare.outputs.timestamp }}-${{ matrix.platform }}
          labels: ${{ steps.meta.outputs.labels }}
          build-args: |
            REPO_OWNER=${{ needs.prepare.outputs.repo_owner }}
            REPO_NAME=${{ needs.prepare.outputs.repo_name }}
            BRANCH=${{ github.ref_name }}
            REPO_URL=https://github.com/${{ github.repository }}
            TIMESTAMP=${{ needs.prepare.outputs.timestamp }}

  create-manifest:
    needs: [prepare, docker]
    runs-on: ubuntu-24.04
    if: ${{ github.event_name != 'pull_request' }}
    steps:
      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v2

      - name: Login to GitHub Container Registry
        uses: docker/login-action@v2
        with:
          registry: ghcr.io
          username: ${{ github.actor }}
          password: ${{ secrets.GITHUB_TOKEN }}

      - name: Login to Docker Hub
        uses: docker/login-action@v2
        with:
          registry: docker.io
          username: ${{ secrets.DOCKERHUB_USERNAME }}
          password: ${{ secrets.DOCKERHUB_TOKEN }}

      - name: Create multi-arch manifest tags
        run: |
          set -euo pipefail
          OWNER=${{ needs.prepare.outputs.repo_owner }}
          REPO=${{ needs.prepare.outputs.repo_name }}
          BRANCH_TAG=${{ needs.prepare.outputs.branch_tag }}
          TIMESTAMP=${{ needs.prepare.outputs.timestamp }}

          echo "Creating multi-arch manifest for ${OWNER}/${REPO}"

          # GitHub Container Registry manifests
          # branch tag (e.g. base or base-dev)
          docker buildx imagetools create \
            --annotation "index:org.opencontainers.image.title=${{ needs.prepare.outputs.repo_name }}" \
            --annotation "index:org.opencontainers.image.description=Your ultimate IPTV & stream Management companion." \
            --annotation "index:org.opencontainers.image.url=https://github.com/${{ github.repository }}" \
            --annotation "index:org.opencontainers.image.source=https://github.com/${{ github.repository }}" \
            --annotation "index:org.opencontainers.image.version=${BRANCH_TAG}-${TIMESTAMP}" \
            --annotation "index:org.opencontainers.image.created=${TIMESTAMP}" \
            --annotation "index:org.opencontainers.image.revision=${{ github.sha }}" \
            --annotation "index:org.opencontainers.image.licenses=See repository" \
            --annotation "index:org.opencontainers.image.documentation=https://dispatcharr.github.io/Dispatcharr-Docs/" \
            --annotation "index:org.opencontainers.image.vendor=${OWNER}" \
            --annotation "index:org.opencontainers.image.authors=${{ github.actor }}" \
            --annotation "index:maintainer=${{ github.actor }}" \
            --annotation "index:build_version=DispatcharrBase version: ${BRANCH_TAG}-${TIMESTAMP}" \
            --tag ghcr.io/${OWNER}/${REPO}:${BRANCH_TAG} \
            ghcr.io/${OWNER}/${REPO}:${BRANCH_TAG}-amd64 ghcr.io/${OWNER}/${REPO}:${BRANCH_TAG}-arm64

          # branch + timestamp tag
          docker buildx imagetools create \
            --annotation "index:org.opencontainers.image.title=${{ needs.prepare.outputs.repo_name }}" \
            --annotation "index:org.opencontainers.image.description=Your ultimate IPTV & stream Management companion." \
            --annotation "index:org.opencontainers.image.url=https://github.com/${{ github.repository }}" \
            --annotation "index:org.opencontainers.image.source=https://github.com/${{ github.repository }}" \
            --annotation "index:org.opencontainers.image.version=${BRANCH_TAG}-${TIMESTAMP}" \
            --annotation "index:org.opencontainers.image.created=${TIMESTAMP}" \
            --annotation "index:org.opencontainers.image.revision=${{ github.sha }}" \
            --annotation "index:org.opencontainers.image.licenses=See repository" \
            --annotation "index:org.opencontainers.image.documentation=https://dispatcharr.github.io/Dispatcharr-Docs/" \
            --annotation "index:org.opencontainers.image.vendor=${OWNER}" \
            --annotation "index:org.opencontainers.image.authors=${{ github.actor }}" \
            --annotation "index:maintainer=${{ github.actor }}" \
            --annotation "index:build_version=DispatcharrBase version: ${BRANCH_TAG}-${TIMESTAMP}" \
            --tag ghcr.io/${OWNER}/${REPO}:${BRANCH_TAG}-${TIMESTAMP} \
            ghcr.io/${OWNER}/${REPO}:${BRANCH_TAG}-${TIMESTAMP}-amd64 ghcr.io/${OWNER}/${REPO}:${BRANCH_TAG}-${TIMESTAMP}-arm64

          # Docker Hub manifests
          # branch tag (e.g. base or base-dev)
          docker buildx imagetools create \
            --annotation "index:org.opencontainers.image.title=${{ needs.prepare.outputs.repo_name }}" \
            --annotation "index:org.opencontainers.image.description=Your ultimate IPTV & stream Management companion." \
            --annotation "index:org.opencontainers.image.url=https://github.com/${{ github.repository }}" \
            --annotation "index:org.opencontainers.image.source=https://github.com/${{ github.repository }}" \
            --annotation "index:org.opencontainers.image.version=${BRANCH_TAG}-${TIMESTAMP}" \
            --annotation "index:org.opencontainers.image.created=${TIMESTAMP}" \
            --annotation "index:org.opencontainers.image.revision=${{ github.sha }}" \
            --annotation "index:org.opencontainers.image.licenses=See repository" \
            --annotation "index:org.opencontainers.image.documentation=https://dispatcharr.github.io/Dispatcharr-Docs/" \
            --annotation "index:org.opencontainers.image.vendor=${OWNER}" \
            --annotation "index:org.opencontainers.image.authors=${{ github.actor }}" \
            --annotation "index:maintainer=${{ github.actor }}" \
            --annotation "index:build_version=DispatcharrBase version: ${BRANCH_TAG}-${TIMESTAMP}" \
            --tag docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${REPO}:${BRANCH_TAG} \
            docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${REPO}:${BRANCH_TAG}-amd64 docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${REPO}:${BRANCH_TAG}-arm64

          # branch + timestamp tag
          docker buildx imagetools create \
            --annotation "index:org.opencontainers.image.title=${{ needs.prepare.outputs.repo_name }}" \
            --annotation "index:org.opencontainers.image.description=Your ultimate IPTV & stream Management companion." \
            --annotation "index:org.opencontainers.image.url=https://github.com/${{ github.repository }}" \
            --annotation "index:org.opencontainers.image.source=https://github.com/${{ github.repository }}" \
            --annotation "index:org.opencontainers.image.version=${BRANCH_TAG}-${TIMESTAMP}" \
            --annotation "index:org.opencontainers.image.created=${TIMESTAMP}" \
            --annotation "index:org.opencontainers.image.revision=${{ github.sha }}" \
            --annotation "index:org.opencontainers.image.licenses=See repository" \
            --annotation "index:org.opencontainers.image.documentation=https://dispatcharr.github.io/Dispatcharr-Docs/" \
            --annotation "index:org.opencontainers.image.vendor=${OWNER}" \
            --annotation "index:org.opencontainers.image.authors=${{ github.actor }}" \
            --annotation "index:maintainer=${{ github.actor }}" \
            --annotation "index:build_version=DispatcharrBase version: ${BRANCH_TAG}-${TIMESTAMP}" \
            --tag docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${REPO}:${BRANCH_TAG}-${TIMESTAMP} \
            docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${REPO}:${BRANCH_TAG}-${TIMESTAMP}-amd64 docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${REPO}:${BRANCH_TAG}-${TIMESTAMP}-arm64
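Because each matrix job pushes only per-architecture tags (…-amd64, …-arm64), the create-manifest job is what makes the plain base/base-dev tags multi-arch. A quick way to check the published result; a sketch, assuming the owner/repo resolve to the lowercased upstream names used by this workflow:

```bash
# Inspect the combined manifest; both linux/amd64 and linux/arm64 entries
# should be listed once the create-manifest job has run.
docker buildx imagetools inspect ghcr.io/dispatcharr/dispatcharr:base-dev

# Pulling the un-suffixed tag then resolves to the right architecture
# for the local machine automatically.
docker pull ghcr.io/dispatcharr/dispatcharr:base-dev
```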

275  .github/workflows/ci.yml  vendored

@@ -2,18 +2,86 @@ name: CI Pipeline

on:
  push:
    branches: [ dev ]
    branches: [dev]
    paths-ignore:
      - '**.md'
  pull_request:
    branches: [ dev ]
    branches: [dev]
  workflow_dispatch:

# Add explicit permissions for the workflow
permissions:
  contents: write # For managing releases and pushing tags
  packages: write # For publishing to GitHub Container Registry
  contents: write
  packages: write

jobs:
  build:
    runs-on: ubuntu-latest
  prepare:
    runs-on: ubuntu-24.04
    # compute a single timestamp, version, and repo metadata for the entire workflow
    outputs:
      repo_owner: ${{ steps.meta.outputs.repo_owner }}
      repo_name: ${{ steps.meta.outputs.repo_name }}
      branch_tag: ${{ steps.meta.outputs.branch_tag }}
      version: ${{ steps.version.outputs.version }}
      timestamp: ${{ steps.timestamp.outputs.timestamp }}

    steps:
      - uses: actions/checkout@v3
        with:
          fetch-depth: 0
          token: ${{ secrets.GITHUB_TOKEN }}

      - name: Generate timestamp for build
        id: timestamp
        run: |
          TIMESTAMP=$(date -u +'%Y%m%d%H%M%S')
          echo "timestamp=${TIMESTAMP}" >> $GITHUB_OUTPUT

      - name: Extract version info
        id: version
        run: |
          VERSION=$(python -c "import version; print(version.__version__)")
          echo "version=${VERSION}" >> $GITHUB_OUTPUT

      - name: Set repository and image metadata
        id: meta
        run: |
          REPO_OWNER=$(echo "${{ github.repository_owner }}" | tr '[:upper:]' '[:lower:]')
          echo "repo_owner=${REPO_OWNER}" >> $GITHUB_OUTPUT

          REPO_NAME=$(echo "${{ github.repository }}" | cut -d '/' -f 2 | tr '[:upper:]' '[:lower:]')
          echo "repo_name=${REPO_NAME}" >> $GITHUB_OUTPUT

          if [[ "${{ github.ref }}" == "refs/heads/main" ]]; then
            echo "branch_tag=latest" >> $GITHUB_OUTPUT
            echo "is_main=true" >> $GITHUB_OUTPUT
          elif [[ "${{ github.ref }}" == "refs/heads/dev" ]]; then
            echo "branch_tag=dev" >> $GITHUB_OUTPUT
            echo "is_main=false" >> $GITHUB_OUTPUT
          else
            BRANCH=$(echo "${{ github.ref }}" | sed 's/refs\/heads\///' | sed 's/[^a-zA-Z0-9]/-/g')
            echo "branch_tag=${BRANCH}" >> $GITHUB_OUTPUT
            echo "is_main=false" >> $GITHUB_OUTPUT
          fi

          if [[ "${{ github.event.pull_request.head.repo.fork }}" == "true" ]]; then
            echo "is_fork=true" >> $GITHUB_OUTPUT
          else
            echo "is_fork=false" >> $GITHUB_OUTPUT
          fi

  docker:
    needs: [prepare]
    strategy:
      fail-fast: false
      matrix:
        platform: [amd64, arm64]
        include:
          - platform: amd64
            runner: ubuntu-24.04
          - platform: arm64
            runner: ubuntu-24.04-arm
    runs-on: ${{ matrix.runner }}
    # no per-job outputs here; shared metadata comes from the `prepare` job
    steps:
      - uses: actions/checkout@v3
        with:
@@ -44,63 +112,162 @@ jobs:
          username: ${{ github.actor }}
          password: ${{ secrets.GITHUB_TOKEN }}

      - name: Generate timestamp for build
        id: timestamp
        run: |
          TIMESTAMP=$(date -u +'%Y%m%d%H%M%S')
          echo "timestamp=${TIMESTAMP}" >> $GITHUB_OUTPUT
      - name: Login to Docker Hub
        uses: docker/login-action@v2
        with:
          registry: docker.io
          username: ${{ secrets.DOCKERHUB_USERNAME }}
          password: ${{ secrets.DOCKERHUB_TOKEN }}

      - name: Extract version info
        id: version
        run: |
          VERSION=$(python -c "import version; print(version.__version__)")
          echo "version=${VERSION}" >> $GITHUB_OUTPUT
          echo "sha_short=${GITHUB_SHA::7}" >> $GITHUB_OUTPUT

      - name: Set repository and image metadata
      - name: Extract metadata for Docker
        id: meta
        run: |
          # Get lowercase repository owner
          REPO_OWNER=$(echo "${{ github.repository_owner }}" | tr '[:upper:]' '[:lower:]')
          echo "repo_owner=${REPO_OWNER}" >> $GITHUB_OUTPUT

          # Get repository name
          REPO_NAME=$(echo "${{ github.repository }}" | cut -d '/' -f 2 | tr '[:upper:]' '[:lower:]')
          echo "repo_name=${REPO_NAME}" >> $GITHUB_OUTPUT

          # Determine branch name
          if [[ "${{ github.ref }}" == "refs/heads/main" ]]; then
            echo "branch_tag=latest" >> $GITHUB_OUTPUT
            echo "is_main=true" >> $GITHUB_OUTPUT
          elif [[ "${{ github.ref }}" == "refs/heads/dev" ]]; then
            echo "branch_tag=dev" >> $GITHUB_OUTPUT
            echo "is_main=false" >> $GITHUB_OUTPUT
          else
            # For other branches, use the branch name
            BRANCH=$(echo "${{ github.ref }}" | sed 's/refs\/heads\///' | sed 's/[^a-zA-Z0-9]/-/g')
            echo "branch_tag=${BRANCH}" >> $GITHUB_OUTPUT
            echo "is_main=false" >> $GITHUB_OUTPUT
          fi

          # Determine if this is from a fork
          if [[ "${{ github.event.pull_request.head.repo.fork }}" == "true" ]]; then
            echo "is_fork=true" >> $GITHUB_OUTPUT
          else
            echo "is_fork=false" >> $GITHUB_OUTPUT
          fi
        uses: docker/metadata-action@v5
        with:
          images: |
            ghcr.io/${{ needs.prepare.outputs.repo_owner }}/${{ needs.prepare.outputs.repo_name }}
            docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${{ needs.prepare.outputs.repo_name }}
          labels: |
            org.opencontainers.image.title=${{ needs.prepare.outputs.repo_name }}
            org.opencontainers.image.description=Your ultimate IPTV & stream Management companion.
            org.opencontainers.image.url=https://github.com/${{ github.repository }}
            org.opencontainers.image.source=https://github.com/${{ github.repository }}
            org.opencontainers.image.version=${{ needs.prepare.outputs.version }}-${{ needs.prepare.outputs.timestamp }}
            org.opencontainers.image.created=${{ needs.prepare.outputs.timestamp }}
            org.opencontainers.image.revision=${{ github.sha }}
            org.opencontainers.image.licenses=See repository
            org.opencontainers.image.documentation=https://dispatcharr.github.io/Dispatcharr-Docs/
            org.opencontainers.image.vendor=${{ needs.prepare.outputs.repo_owner }}
            org.opencontainers.image.authors=${{ github.actor }}
            maintainer=${{ github.actor }}
            build_version=Dispatcharr version: ${{ needs.prepare.outputs.version }}-${{ needs.prepare.outputs.timestamp }}

      - name: Build and push Docker image
        uses: docker/build-push-action@v4
        with:
          context: .
          push: ${{ github.event_name != 'pull_request' }}
          platforms: linux/amd64 # Fast build - amd64 only
          # Build only the platform for this matrix job to avoid running amd64
          # stages under qemu on an arm64 runner (and vice-versa). This makes
          # the matrix runner's platform the one built by buildx.
          platforms: linux/${{ matrix.platform }}
          # push arch-specific tags from each matrix job (they will be combined
          # into a multi-arch manifest in a follow-up job)
          tags: |
            ghcr.io/${{ steps.meta.outputs.repo_owner }}/${{ steps.meta.outputs.repo_name }}:${{ steps.meta.outputs.branch_tag }}
            ghcr.io/${{ steps.meta.outputs.repo_owner }}/${{ steps.meta.outputs.repo_name }}:${{ steps.version.outputs.version }}-${{ steps.timestamp.outputs.timestamp }}
            ghcr.io/${{ steps.meta.outputs.repo_owner }}/${{ steps.meta.outputs.repo_name }}:${{ steps.version.outputs.sha_short }}
            ghcr.io/${{ needs.prepare.outputs.repo_owner }}/${{ needs.prepare.outputs.repo_name }}:${{ needs.prepare.outputs.branch_tag }}-${{ matrix.platform }}
            ghcr.io/${{ needs.prepare.outputs.repo_owner }}/${{ needs.prepare.outputs.repo_name }}:${{ needs.prepare.outputs.version }}-${{ needs.prepare.outputs.timestamp }}-${{ matrix.platform }}
            docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${{ needs.prepare.outputs.repo_name }}:${{ needs.prepare.outputs.branch_tag }}-${{ matrix.platform }}
            docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${{ needs.prepare.outputs.repo_name }}:${{ needs.prepare.outputs.version }}-${{ needs.prepare.outputs.timestamp }}-${{ matrix.platform }}
          labels: ${{ steps.meta.outputs.labels }}
          build-args: |
            REPO_OWNER=${{ needs.prepare.outputs.repo_owner }}
            REPO_NAME=${{ needs.prepare.outputs.repo_name }}
            BASE_TAG=base
            BRANCH=${{ github.ref_name }}
            REPO_URL=https://github.com/${{ github.repository }}
            TIMESTAMP=${{ steps.timestamp.outputs.timestamp }}
            TIMESTAMP=${{ needs.prepare.outputs.timestamp }}
          file: ./docker/Dockerfile

  create-manifest:
    # wait for prepare and all matrix builds to finish
    needs: [prepare, docker]
    runs-on: ubuntu-24.04
    if: ${{ github.event_name != 'pull_request' }}
    steps:
      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v2

      - name: Login to GitHub Container Registry
        uses: docker/login-action@v2
        with:
          registry: ghcr.io
          username: ${{ github.actor }}
          password: ${{ secrets.GITHUB_TOKEN }}

      - name: Login to Docker Hub
        uses: docker/login-action@v2
        with:
          registry: docker.io
          username: ${{ secrets.DOCKERHUB_USERNAME }}
          password: ${{ secrets.DOCKERHUB_TOKEN }}

      - name: Create multi-arch manifest tags
        run: |
          set -euo pipefail
          OWNER=${{ needs.prepare.outputs.repo_owner }}
          REPO=${{ needs.prepare.outputs.repo_name }}
          BRANCH_TAG=${{ needs.prepare.outputs.branch_tag }}
          VERSION=${{ needs.prepare.outputs.version }}
          TIMESTAMP=${{ needs.prepare.outputs.timestamp }}

          echo "Creating multi-arch manifest for ${OWNER}/${REPO}"

          # branch tag (e.g. latest or dev)
          docker buildx imagetools create \
            --annotation "index:org.opencontainers.image.title=${{ needs.prepare.outputs.repo_name }}" \
            --annotation "index:org.opencontainers.image.description=Your ultimate IPTV & stream Management companion." \
            --annotation "index:org.opencontainers.image.url=https://github.com/${{ github.repository }}" \
            --annotation "index:org.opencontainers.image.source=https://github.com/${{ github.repository }}" \
            --annotation "index:org.opencontainers.image.version=${BRANCH_TAG}" \
            --annotation "index:org.opencontainers.image.created=${TIMESTAMP}" \
            --annotation "index:org.opencontainers.image.revision=${{ github.sha }}" \
            --annotation "index:org.opencontainers.image.licenses=See repository" \
            --annotation "index:org.opencontainers.image.documentation=https://dispatcharr.github.io/Dispatcharr-Docs/" \
            --annotation "index:org.opencontainers.image.vendor=${OWNER}" \
            --annotation "index:org.opencontainers.image.authors=${{ github.actor }}" \
            --annotation "index:maintainer=${{ github.actor }}" \
            --annotation "index:build_version=Dispatcharr version: ${VERSION}-${TIMESTAMP}" \
            --tag ghcr.io/${OWNER}/${REPO}:${BRANCH_TAG} \
            ghcr.io/${OWNER}/${REPO}:${BRANCH_TAG}-amd64 ghcr.io/${OWNER}/${REPO}:${BRANCH_TAG}-arm64

          # version + timestamp tag
          docker buildx imagetools create \
            --annotation "index:org.opencontainers.image.title=${{ needs.prepare.outputs.repo_name }}" \
            --annotation "index:org.opencontainers.image.description=Your ultimate IPTV & stream Management companion." \
            --annotation "index:org.opencontainers.image.url=https://github.com/${{ github.repository }}" \
            --annotation "index:org.opencontainers.image.source=https://github.com/${{ github.repository }}" \
            --annotation "index:org.opencontainers.image.version=${VERSION}-${TIMESTAMP}" \
            --annotation "index:org.opencontainers.image.created=${TIMESTAMP}" \
            --annotation "index:org.opencontainers.image.revision=${{ github.sha }}" \
            --annotation "index:org.opencontainers.image.licenses=See repository" \
            --annotation "index:org.opencontainers.image.documentation=https://dispatcharr.github.io/Dispatcharr-Docs/" \
            --annotation "index:org.opencontainers.image.vendor=${OWNER}" \
            --annotation "index:org.opencontainers.image.authors=${{ github.actor }}" \
            --annotation "index:maintainer=${{ github.actor }}" \
            --annotation "index:build_version=Dispatcharr version: ${VERSION}-${TIMESTAMP}" \
            --tag ghcr.io/${OWNER}/${REPO}:${VERSION}-${TIMESTAMP} \
            ghcr.io/${OWNER}/${REPO}:${VERSION}-${TIMESTAMP}-amd64 ghcr.io/${OWNER}/${REPO}:${VERSION}-${TIMESTAMP}-arm64

          # also create Docker Hub manifests using the same username
|
||||
docker buildx imagetools create \
|
||||
--annotation "index:org.opencontainers.image.title=${{ needs.prepare.outputs.repo_name }}" \
|
||||
--annotation "index:org.opencontainers.image.description=Your ultimate IPTV & stream Management companion." \
|
||||
--annotation "index:org.opencontainers.image.url=https://github.com/${{ github.repository }}" \
|
||||
--annotation "index:org.opencontainers.image.source=https://github.com/${{ github.repository }}" \
|
||||
--annotation "index:org.opencontainers.image.version=${BRANCH_TAG}" \
|
||||
--annotation "index:org.opencontainers.image.created=${TIMESTAMP}" \
|
||||
--annotation "index:org.opencontainers.image.revision=${{ github.sha }}" \
|
||||
--annotation "index:org.opencontainers.image.licenses=See repository" \
|
||||
--annotation "index:org.opencontainers.image.documentation=https://dispatcharr.github.io/Dispatcharr-Docs/" \
|
||||
--annotation "index:org.opencontainers.image.vendor=${OWNER}" \
|
||||
--annotation "index:org.opencontainers.image.authors=${{ github.actor }}" \
|
||||
--annotation "index:maintainer=${{ github.actor }}" \
|
||||
--annotation "index:build_version=Dispatcharr version: ${VERSION}-${TIMESTAMP}" \
|
||||
--tag docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${REPO}:${BRANCH_TAG} \
|
||||
docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${REPO}:${BRANCH_TAG}-amd64 docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${REPO}:${BRANCH_TAG}-arm64
|
||||
|
||||
docker buildx imagetools create \
|
||||
--annotation "index:org.opencontainers.image.title=${{ needs.prepare.outputs.repo_name }}" \
|
||||
--annotation "index:org.opencontainers.image.description=Your ultimate IPTV & stream Management companion." \
|
||||
--annotation "index:org.opencontainers.image.url=https://github.com/${{ github.repository }}" \
|
||||
--annotation "index:org.opencontainers.image.source=https://github.com/${{ github.repository }}" \
|
||||
--annotation "index:org.opencontainers.image.version=${VERSION}-${TIMESTAMP}" \
|
||||
--annotation "index:org.opencontainers.image.created=${TIMESTAMP}" \
|
||||
--annotation "index:org.opencontainers.image.revision=${{ github.sha }}" \
|
||||
--annotation "index:org.opencontainers.image.licenses=See repository" \
|
||||
--annotation "index:org.opencontainers.image.documentation=https://dispatcharr.github.io/Dispatcharr-Docs/" \
|
||||
--annotation "index:org.opencontainers.image.vendor=${OWNER}" \
|
||||
--annotation "index:org.opencontainers.image.authors=${{ github.actor }}" \
|
||||
--annotation "index:maintainer=${{ github.actor }}" \
|
||||
--annotation "index:build_version=Dispatcharr version: ${VERSION}-${TIMESTAMP}" \
|
||||
--tag docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${REPO}:${VERSION}-${TIMESTAMP} \
|
||||
docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${REPO}:${VERSION}-${TIMESTAMP}-amd64 docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${REPO}:${VERSION}-${TIMESTAMP}-arm64
|
||||
|
|
|
|||
.github/workflows/frontend-tests.yml (new file, 41 lines)
|
|
@ -0,0 +1,41 @@
|
|||
name: Frontend Tests
|
||||
|
||||
on:
|
||||
push:
|
||||
branches: [main, dev]
|
||||
paths:
|
||||
- 'frontend/**'
|
||||
- '.github/workflows/frontend-tests.yml'
|
||||
pull_request:
|
||||
branches: [main, dev]
|
||||
paths:
|
||||
- 'frontend/**'
|
||||
- '.github/workflows/frontend-tests.yml'
|
||||
|
||||
jobs:
|
||||
test:
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
defaults:
|
||||
run:
|
||||
working-directory: ./frontend
|
||||
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Setup Node.js
|
||||
uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: '24'
|
||||
cache: 'npm'
|
||||
cache-dependency-path: './frontend/package-lock.json'
|
||||
|
||||
- name: Install dependencies
|
||||
run: npm ci
|
||||
|
||||
# - name: Run linter
|
||||
# run: npm run lint
|
||||
|
||||
- name: Run tests
|
||||
run: npm test
|
||||
.github/workflows/release.yml (195 lines changed)
|
|
@ -15,16 +15,22 @@ on:
|
|||
|
||||
# Add explicit permissions for the workflow
|
||||
permissions:
|
||||
contents: write # For managing releases and pushing tags
|
||||
packages: write # For publishing to GitHub Container Registry
|
||||
contents: write # For managing releases and pushing tags
|
||||
packages: write # For publishing to GitHub Container Registry
|
||||
|
||||
jobs:
|
||||
release:
|
||||
runs-on: ubuntu-latest
|
||||
prepare:
|
||||
runs-on: ubuntu-24.04
|
||||
outputs:
|
||||
new_version: ${{ steps.update_version.outputs.new_version }}
|
||||
repo_owner: ${{ steps.meta.outputs.repo_owner }}
|
||||
repo_name: ${{ steps.meta.outputs.repo_name }}
|
||||
timestamp: ${{ steps.timestamp.outputs.timestamp }}
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
with:
|
||||
fetch-depth: 0
|
||||
token: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
- name: Configure Git
|
||||
run: |
|
||||
|
|
@ -38,14 +44,55 @@ jobs:
|
|||
NEW_VERSION=$(python -c "import version; print(f'{version.__version__}')")
|
||||
echo "new_version=${NEW_VERSION}" >> $GITHUB_OUTPUT
|
||||
|
||||
- name: Set lowercase repo owner
|
||||
id: repo_owner
|
||||
- name: Update Changelog
|
||||
run: |
|
||||
python scripts/update_changelog.py ${{ steps.update_version.outputs.new_version }}
|
||||
|
||||
- name: Set repository metadata
|
||||
id: meta
|
||||
run: |
|
||||
REPO_OWNER=$(echo "${{ github.repository_owner }}" | tr '[:upper:]' '[:lower:]')
|
||||
echo "lowercase=${REPO_OWNER}" >> $GITHUB_OUTPUT
|
||||
echo "repo_owner=${REPO_OWNER}" >> $GITHUB_OUTPUT
|
||||
|
||||
- name: Set up QEMU
|
||||
uses: docker/setup-qemu-action@v2
|
||||
REPO_NAME=$(echo "${{ github.repository }}" | cut -d '/' -f 2 | tr '[:upper:]' '[:lower:]')
|
||||
echo "repo_name=${REPO_NAME}" >> $GITHUB_OUTPUT
|
||||
|
||||
- name: Generate timestamp for build
|
||||
id: timestamp
|
||||
run: |
|
||||
TIMESTAMP=$(date -u +'%Y%m%d%H%M%S')
|
||||
echo "timestamp=${TIMESTAMP}" >> $GITHUB_OUTPUT
|
||||
|
||||
- name: Commit and Tag
|
||||
run: |
|
||||
git add version.py CHANGELOG.md
|
||||
git commit -m "Release v${{ steps.update_version.outputs.new_version }}"
|
||||
git tag -a "v${{ steps.update_version.outputs.new_version }}" -m "Release v${{ steps.update_version.outputs.new_version }}"
|
||||
git push origin main --tags
|
||||
|
||||
docker:
|
||||
needs: [prepare]
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
platform: [amd64, arm64]
|
||||
include:
|
||||
- platform: amd64
|
||||
runner: ubuntu-24.04
|
||||
- platform: arm64
|
||||
runner: ubuntu-24.04-arm
|
||||
runs-on: ${{ matrix.runner }}
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
with:
|
||||
fetch-depth: 0
|
||||
token: ${{ secrets.GITHUB_TOKEN }}
|
||||
ref: main
|
||||
|
||||
- name: Configure Git
|
||||
run: |
|
||||
git config user.name "GitHub Actions"
|
||||
git config user.email "actions@github.com"
|
||||
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@v2
|
||||
|
|
@ -57,36 +104,134 @@ jobs:
|
|||
username: ${{ github.actor }}
|
||||
password: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
- name: Commit and Tag
|
||||
run: |
|
||||
git add version.py
|
||||
git commit -m "Release v${{ steps.update_version.outputs.new_version }}"
|
||||
git tag -a "v${{ steps.update_version.outputs.new_version }}" -m "Release v${{ steps.update_version.outputs.new_version }}"
|
||||
git push origin main --tags
|
||||
- name: Login to Docker Hub
|
||||
uses: docker/login-action@v2
|
||||
with:
|
||||
registry: docker.io
|
||||
username: ${{ secrets.DOCKERHUB_USERNAME }}
|
||||
password: ${{ secrets.DOCKERHUB_TOKEN }}
|
||||
|
||||
- name: Build and Push Release Image
|
||||
- name: Extract metadata for Docker
|
||||
id: meta
|
||||
uses: docker/metadata-action@v5
|
||||
with:
|
||||
images: |
|
||||
ghcr.io/${{ needs.prepare.outputs.repo_owner }}/${{ needs.prepare.outputs.repo_name }}
|
||||
docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${{ needs.prepare.outputs.repo_name }}
|
||||
labels: |
|
||||
org.opencontainers.image.title=${{ needs.prepare.outputs.repo_name }}
|
||||
org.opencontainers.image.description=Your ultimate IPTV & stream Management companion.
|
||||
org.opencontainers.image.url=https://github.com/${{ github.repository }}
|
||||
org.opencontainers.image.source=https://github.com/${{ github.repository }}
|
||||
org.opencontainers.image.version=${{ needs.prepare.outputs.new_version }}
|
||||
org.opencontainers.image.created=${{ needs.prepare.outputs.timestamp }}
|
||||
org.opencontainers.image.revision=${{ github.sha }}
|
||||
org.opencontainers.image.licenses=See repository
|
||||
org.opencontainers.image.documentation=https://dispatcharr.github.io/Dispatcharr-Docs/
|
||||
org.opencontainers.image.vendor=${{ needs.prepare.outputs.repo_owner }}
|
||||
org.opencontainers.image.authors=${{ github.actor }}
|
||||
maintainer=${{ github.actor }}
|
||||
build_version=Dispatcharr version: ${{ needs.prepare.outputs.new_version }} Build date: ${{ needs.prepare.outputs.timestamp }}
|
||||
|
||||
- name: Build and push Docker image
|
||||
uses: docker/build-push-action@v4
|
||||
with:
|
||||
context: .
|
||||
push: true
|
||||
platforms: linux/amd64,linux/arm64, #linux/arm/v7 # Multi-arch support for releases
|
||||
platforms: linux/${{ matrix.platform }}
|
||||
tags: |
|
||||
ghcr.io/${{ steps.repo_owner.outputs.lowercase }}/dispatcharr:latest
|
||||
ghcr.io/${{ steps.repo_owner.outputs.lowercase }}/dispatcharr:${{ steps.update_version.outputs.new_version }}
|
||||
ghcr.io/${{ steps.repo_owner.outputs.lowercase }}/dispatcharr:latest-amd64
|
||||
ghcr.io/${{ steps.repo_owner.outputs.lowercase }}/dispatcharr:latest-arm64
|
||||
ghcr.io/${{ steps.repo_owner.outputs.lowercase }}/dispatcharr:${{ steps.update_version.outputs.new_version }}-amd64
|
||||
ghcr.io/${{ steps.repo_owner.outputs.lowercase }}/dispatcharr:${{ steps.update_version.outputs.new_version }}-arm64
|
||||
ghcr.io/${{ needs.prepare.outputs.repo_owner }}/${{ needs.prepare.outputs.repo_name }}:latest-${{ matrix.platform }}
|
||||
ghcr.io/${{ needs.prepare.outputs.repo_owner }}/${{ needs.prepare.outputs.repo_name }}:${{ needs.prepare.outputs.new_version }}-${{ matrix.platform }}
|
||||
docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${{ needs.prepare.outputs.repo_name }}:latest-${{ matrix.platform }}
|
||||
docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${{ needs.prepare.outputs.repo_name }}:${{ needs.prepare.outputs.new_version }}-${{ matrix.platform }}
|
||||
labels: ${{ steps.meta.outputs.labels }}
|
||||
build-args: |
|
||||
REPO_OWNER=${{ needs.prepare.outputs.repo_owner }}
|
||||
REPO_NAME=${{ needs.prepare.outputs.repo_name }}
|
||||
BRANCH=${{ github.ref_name }}
|
||||
REPO_URL=https://github.com/${{ github.repository }}
|
||||
file: ./docker/Dockerfile
|
||||
|
||||
create-manifest:
|
||||
needs: [prepare, docker]
|
||||
runs-on: ubuntu-24.04
|
||||
steps:
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@v2
|
||||
|
||||
- name: Login to GitHub Container Registry
|
||||
uses: docker/login-action@v2
|
||||
with:
|
||||
registry: ghcr.io
|
||||
username: ${{ github.actor }}
|
||||
password: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
- name: Login to Docker Hub
|
||||
uses: docker/login-action@v2
|
||||
with:
|
||||
registry: docker.io
|
||||
username: ${{ secrets.DOCKERHUB_USERNAME }}
|
||||
password: ${{ secrets.DOCKERHUB_TOKEN }}
|
||||
|
||||
- name: Create multi-arch manifest tags
|
||||
run: |
|
||||
set -euo pipefail
|
||||
OWNER=${{ needs.prepare.outputs.repo_owner }}
|
||||
REPO=${{ needs.prepare.outputs.repo_name }}
|
||||
VERSION=${{ needs.prepare.outputs.new_version }}
|
||||
TIMESTAMP=${{ needs.prepare.outputs.timestamp }}
|
||||
|
||||
echo "Creating multi-arch manifest for ${OWNER}/${REPO}"
|
||||
|
||||
# GitHub Container Registry manifests
|
||||
# Create one manifest with both latest and version tags
|
||||
docker buildx imagetools create \
|
||||
--annotation "index:org.opencontainers.image.title=${{ needs.prepare.outputs.repo_name }}" \
|
||||
--annotation "index:org.opencontainers.image.description=Your ultimate IPTV & stream Management companion." \
|
||||
--annotation "index:org.opencontainers.image.url=https://github.com/${{ github.repository }}" \
|
||||
--annotation "index:org.opencontainers.image.source=https://github.com/${{ github.repository }}" \
|
||||
--annotation "index:org.opencontainers.image.version=${VERSION}" \
|
||||
--annotation "index:org.opencontainers.image.created=${TIMESTAMP}" \
|
||||
--annotation "index:org.opencontainers.image.revision=${{ github.sha }}" \
|
||||
--annotation "index:org.opencontainers.image.licenses=See repository" \
|
||||
--annotation "index:org.opencontainers.image.documentation=https://dispatcharr.github.io/Dispatcharr-Docs/" \
|
||||
--annotation "index:org.opencontainers.image.vendor=${OWNER}" \
|
||||
--annotation "index:org.opencontainers.image.authors=${{ github.actor }}" \
|
||||
--annotation "index:maintainer=${{ github.actor }}" \
|
||||
--annotation "index:build_version=Dispatcharr version: ${VERSION} Build date: ${TIMESTAMP}" \
|
||||
--tag ghcr.io/${OWNER}/${REPO}:latest \
|
||||
--tag ghcr.io/${OWNER}/${REPO}:${VERSION} \
|
||||
ghcr.io/${OWNER}/${REPO}:${VERSION}-amd64 ghcr.io/${OWNER}/${REPO}:${VERSION}-arm64
|
||||
|
||||
# Docker Hub manifests
|
||||
# Create one manifest with both latest and version tags
|
||||
docker buildx imagetools create \
|
||||
--annotation "index:org.opencontainers.image.title=${{ needs.prepare.outputs.repo_name }}" \
|
||||
--annotation "index:org.opencontainers.image.description=Your ultimate IPTV & stream Management companion." \
|
||||
--annotation "index:org.opencontainers.image.url=https://github.com/${{ github.repository }}" \
|
||||
--annotation "index:org.opencontainers.image.source=https://github.com/${{ github.repository }}" \
|
||||
--annotation "index:org.opencontainers.image.version=${VERSION}" \
|
||||
--annotation "index:org.opencontainers.image.created=${TIMESTAMP}" \
|
||||
--annotation "index:org.opencontainers.image.revision=${{ github.sha }}" \
|
||||
--annotation "index:org.opencontainers.image.licenses=See repository" \
|
||||
--annotation "index:org.opencontainers.image.documentation=https://dispatcharr.github.io/Dispatcharr-Docs/" \
|
||||
--annotation "index:org.opencontainers.image.vendor=${OWNER}" \
|
||||
--annotation "index:org.opencontainers.image.authors=${{ github.actor }}" \
|
||||
--annotation "index:maintainer=${{ github.actor }}" \
|
||||
--annotation "index:build_version=Dispatcharr version: ${VERSION} Build date: ${TIMESTAMP}" \
|
||||
--tag docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${REPO}:latest \
|
||||
--tag docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${REPO}:${VERSION} \
|
||||
docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${REPO}:${VERSION}-amd64 docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${REPO}:${VERSION}-arm64
|
||||
|
||||
create-release:
|
||||
needs: [prepare, create-manifest]
|
||||
runs-on: ubuntu-24.04
|
||||
steps:
|
||||
- name: Create GitHub Release
|
||||
uses: softprops/action-gh-release@v1
|
||||
with:
|
||||
tag_name: v${{ steps.update_version.outputs.new_version }}
|
||||
name: Release v${{ steps.update_version.outputs.new_version }}
|
||||
tag_name: v${{ needs.prepare.outputs.new_version }}
|
||||
name: Release v${{ needs.prepare.outputs.new_version }}
|
||||
draft: false
|
||||
prerelease: false
|
||||
token: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
|
|
|||
.gitignore (3 lines changed)
|
|
@ -18,4 +18,5 @@ dump.rdb
|
|||
debugpy*
|
||||
uwsgi.sock
|
||||
package-lock.json
|
||||
models
|
||||
models
|
||||
.idea
|
||||
CHANGELOG.md (new file, 1014 lines; diff suppressed because it is too large)
Plugins.md (new file, 286 lines)
|
|
@ -0,0 +1,286 @@
|
|||
# Dispatcharr Plugins
|
||||
|
||||
This document explains how to build, install, and use Python plugins in Dispatcharr. It covers discovery, the plugin interface, settings, actions, how to access application APIs, and examples.
|
||||
|
||||
---
|
||||
|
||||
## Quick Start
|
||||
|
||||
1) Create a folder under `/app/data/plugins/my_plugin/` (host path `data/plugins/my_plugin/` in the repo).
|
||||
|
||||
2) Add a `plugin.py` file exporting a `Plugin` class:
|
||||
|
||||
```
|
||||
# /app/data/plugins/my_plugin/plugin.py
|
||||
class Plugin:
|
||||
name = "My Plugin"
|
||||
version = "0.1.0"
|
||||
description = "Does something useful"
|
||||
|
||||
# Settings fields rendered by the UI and persisted by the backend
|
||||
fields = [
|
||||
{"id": "enabled", "label": "Enabled", "type": "boolean", "default": True},
|
||||
{"id": "limit", "label": "Item limit", "type": "number", "default": 5},
|
||||
{"id": "mode", "label": "Mode", "type": "select", "default": "safe",
|
||||
"options": [
|
||||
{"value": "safe", "label": "Safe"},
|
||||
{"value": "fast", "label": "Fast"},
|
||||
]},
|
||||
{"id": "note", "label": "Note", "type": "string", "default": ""},
|
||||
]
|
||||
|
||||
# Actions appear as buttons. Clicking one calls run(action, params, context)
|
||||
actions = [
|
||||
{"id": "do_work", "label": "Do Work", "description": "Process items"},
|
||||
]
|
||||
|
||||
def run(self, action: str, params: dict, context: dict):
|
||||
settings = context.get("settings", {})
|
||||
logger = context.get("logger")
|
||||
|
||||
if action == "do_work":
|
||||
limit = int(settings.get("limit", 5))
|
||||
mode = settings.get("mode", "safe")
|
||||
logger.info(f"My Plugin running with limit={limit}, mode={mode}")
|
||||
# Do a small amount of work here. Schedule Celery tasks for heavy work.
|
||||
return {"status": "ok", "processed": limit, "mode": mode}
|
||||
|
||||
return {"status": "error", "message": f"Unknown action {action}"}
|
||||
```
|
||||
|
||||
3) Open the Plugins page in the UI, click the refresh icon to reload discovery, then configure and run your plugin.
|
||||
|
||||
---
|
||||
|
||||
## Where Plugins Live
|
||||
|
||||
- Default directory: `/app/data/plugins` inside the container.
|
||||
- Override with env var: `DISPATCHARR_PLUGINS_DIR`.
|
||||
- Each plugin is a directory containing either:
|
||||
- `plugin.py` exporting a `Plugin` class, or
|
||||
- a Python package (`__init__.py`) exporting a `Plugin` class.
|
||||
|
||||
The directory name (lowercased, spaces as `_`) is used as the registry key and module import path (e.g. `my_plugin.plugin`).
|
||||
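As a rough sketch of that mapping (the real logic lives in `apps/plugins/loader.py`; this helper is illustrative only):

```
# Illustrative only -- see apps/plugins/loader.py for the real implementation
def plugin_key(folder_name: str) -> str:
    # lowercase and replace spaces with underscores
    return folder_name.strip().lower().replace(" ", "_")

key = plugin_key("My Plugin")       # -> "my_plugin"
module_path = f"{key}.plugin"       # imported as my_plugin.plugin
```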
|
||||
---
|
||||
|
||||
## Discovery & Lifecycle
|
||||
|
||||
- Discovery runs at server startup and on-demand when:
|
||||
- Fetching the plugins list from the UI
|
||||
- Hitting `POST /api/plugins/plugins/reload/`
|
||||
- The loader imports each plugin module and instantiates `Plugin()`.
|
||||
- Metadata (name, version, description) and a per-plugin settings JSON are stored in the DB.
|
||||
|
||||
Backend code:
|
||||
- Loader: `apps/plugins/loader.py`
|
||||
- API Views: `apps/plugins/api_views.py`
|
||||
- API URLs: `apps/plugins/api_urls.py`
|
||||
- Model: `apps/plugins/models.py` (stores `enabled` flag and `settings` per plugin)
|
||||
|
||||
---
|
||||
|
||||
## Plugin Interface
|
||||
|
||||
Export a `Plugin` class. Supported attributes and behavior:
|
||||
|
||||
- `name` (str): Human-readable name.
|
||||
- `version` (str): Semantic version string.
|
||||
- `description` (str): Short description.
|
||||
- `fields` (list): Settings schema used by the UI to render controls.
|
||||
- `actions` (list): Available actions; the UI renders a Run button for each.
|
||||
- `run(action, params, context)` (callable): Invoked when a user clicks an action.
|
||||
|
||||
### Settings Schema
|
||||
Supported field `type`s:
|
||||
- `boolean`
|
||||
- `number`
|
||||
- `string`
|
||||
- `select` (requires `options`: `[{"value": ..., "label": ...}, ...]`)
|
||||
|
||||
Common field keys:
|
||||
- `id` (str): Settings key.
|
||||
- `label` (str): Label shown in the UI.
|
||||
- `type` (str): One of the types listed above.
|
||||
- `default` (any): Default value used until saved.
|
||||
- `help_text` (str, optional): Shown under the control.
|
||||
- `options` (list, for select): List of `{value, label}`.
|
||||
|
||||
The UI automatically renders settings and persists them. The backend stores settings in `PluginConfig.settings`.
|
||||
|
||||
Read settings in `run` via `context["settings"]`.
|
||||
|
||||
### Actions
|
||||
Each action is a dict:
|
||||
- `id` (str): Unique action id.
|
||||
- `label` (str): Button label.
|
||||
- `description` (str, optional): Helper text.
|
||||
|
||||
Clicking an action calls your plugin’s `run(action, params, context)` and shows a notification with the result or error.
|
||||
|
||||
### Action Confirmation (Modal)
|
||||
Developers can request a confirmation modal per action using the `confirm` key on the action. Options:
|
||||
|
||||
- Boolean: `confirm: true` will show a default confirmation modal.
|
||||
- Object: `confirm: { required: true, title: '...', message: '...' }` to customize the modal title and message.
|
||||
|
||||
Example:
|
||||
```
|
||||
actions = [
|
||||
{
|
||||
"id": "danger_run",
|
||||
"label": "Do Something Risky",
|
||||
"description": "Runs a job that affects many records.",
|
||||
"confirm": { "required": true, "title": "Proceed?", "message": "This will modify many records." },
|
||||
}
|
||||
]
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Accessing Dispatcharr APIs from Plugins
|
||||
|
||||
Plugins are server-side Python code running within the Django application. You can:
|
||||
|
||||
- Import models and run queries/updates:
|
||||
```
|
||||
from apps.m3u.models import M3UAccount
|
||||
from apps.epg.models import EPGSource
|
||||
from apps.channels.models import Channel
|
||||
from core.models import CoreSettings
|
||||
```
|
||||
|
||||
- Dispatch Celery tasks for heavy work (recommended):
|
||||
```
|
||||
from apps.m3u.tasks import refresh_m3u_accounts # apps/m3u/tasks.py
|
||||
from apps.epg.tasks import refresh_all_epg_data # apps/epg/tasks.py
|
||||
|
||||
refresh_m3u_accounts.delay()
|
||||
refresh_all_epg_data.delay()
|
||||
```
|
||||
|
||||
- Send WebSocket updates:
|
||||
```
|
||||
from core.utils import send_websocket_update
|
||||
send_websocket_update('updates', 'update', {"type": "plugin", "plugin": "my_plugin", "message": "Done"})
|
||||
```
|
||||
|
||||
- Use transactions:
|
||||
```
|
||||
from django.db import transaction
|
||||
with transaction.atomic():
|
||||
# bulk updates here
|
||||
...
|
||||
```
|
||||
|
||||
- Log via provided context or standard logging:
|
||||
```
|
||||
def run(self, action, params, context):
|
||||
logger = context.get("logger") # already configured
|
||||
logger.info("running action %s", action)
|
||||
```
|
||||
|
||||
Prefer Celery tasks (`.delay()`) to keep `run` fast and non-blocking.
|
||||
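Putting these pieces together, a `run` handler might look like the sketch below. It only uses the models, tasks, and helpers shown above; the `queue_refresh` action id and the returned payload shape are made up for illustration:

```
# Sketch of a run() handler combining the APIs above (action id and payload are illustrative)
def run(self, action, params, context):
    logger = context.get("logger")

    if action == "queue_refresh":
        from apps.m3u.models import M3UAccount
        from apps.m3u.tasks import refresh_m3u_accounts
        from core.utils import send_websocket_update

        count = M3UAccount.objects.count()
        logger.info("queueing refresh for %d M3U accounts", count)

        # heavy work goes to Celery so run() returns quickly
        refresh_m3u_accounts.delay()

        # let the UI know work has been queued
        send_websocket_update('updates', 'update',
                              {"type": "plugin", "plugin": "my_plugin", "message": "Refresh queued"})
        return {"status": "queued", "accounts": count}

    return {"status": "error", "message": f"Unknown action: {action}"}
```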
|
||||
---
|
||||
|
||||
## REST Endpoints (for UI and tooling)
|
||||
|
||||
- List plugins: `GET /api/plugins/plugins/`
|
||||
- Response: `{ "plugins": [{ key, name, version, description, enabled, fields, settings, actions }, ...] }`
|
||||
- Reload discovery: `POST /api/plugins/plugins/reload/`
|
||||
- Import plugin: `POST /api/plugins/plugins/import/` with form-data file field `file`
|
||||
- Update settings: `POST /api/plugins/plugins/<key>/settings/` with `{"settings": {...}}`
|
||||
- Run action: `POST /api/plugins/plugins/<key>/run/` with `{"action": "id", "params": {...}}`
|
||||
- Enable/disable: `POST /api/plugins/plugins/<key>/enabled/` with `{"enabled": true|false}`
|
||||
|
||||
Notes:
|
||||
- When disabled, a plugin cannot run actions; backend returns HTTP 403.
|
||||
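For tooling outside the UI, these endpoints can be driven with any HTTP client. Below is a minimal Python sketch using `requests`, assuming you already hold a JWT access token for an admin user; the host, token, and plugin key are placeholders to adjust for your deployment, and the `limit` setting and `do_work` action come from the Quick Start example plugin:

```
# Placeholder host, token, and plugin key -- adjust for your deployment
import requests

BASE = "http://localhost:9191"
HEADERS = {"Authorization": "Bearer <access-token>"}

# list installed plugins with their settings and actions
plugins = requests.get(f"{BASE}/api/plugins/plugins/", headers=HEADERS).json()["plugins"]

# enable a plugin, update its settings, then run one of its actions
key = "my_plugin"
requests.post(f"{BASE}/api/plugins/plugins/{key}/enabled/", json={"enabled": True}, headers=HEADERS)
requests.post(f"{BASE}/api/plugins/plugins/{key}/settings/", json={"settings": {"limit": 10}}, headers=HEADERS)
result = requests.post(f"{BASE}/api/plugins/plugins/{key}/run/", json={"action": "do_work", "params": {}}, headers=HEADERS)
print(result.json())
```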
|
||||
---
|
||||
|
||||
## Importing Plugins
|
||||
|
||||
- In the UI, click the Import button on the Plugins page and upload a `.zip` containing a plugin folder.
|
||||
- The archive should contain either `plugin.py` or a Python package (`__init__.py`); a packaging sketch follows this list.
|
||||
- On success, the UI shows the plugin name/description and lets you enable it immediately (plugins are disabled by default).
|
||||
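If you want to build the archive programmatically rather than zipping by hand, here is a small sketch using the standard library (paths are examples):

```
# Produces my_plugin.zip with the my_plugin/ folder (containing plugin.py) at its root
import shutil

shutil.make_archive(
    "my_plugin",              # output name -> my_plugin.zip
    "zip",
    root_dir="data/plugins",  # directory to archive from
    base_dir="my_plugin",     # folder included inside the archive
)
```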
|
||||
---
|
||||
|
||||
## Enabling / Disabling Plugins
|
||||
|
||||
- Each plugin has a persisted `enabled` flag (default: disabled) and `ever_enabled` flag in the DB (`apps/plugins/models.py`).
|
||||
- New plugins are disabled by default and require an explicit enable.
|
||||
- The first time a plugin is enabled, the UI shows a trust warning modal explaining that plugins can run arbitrary server-side code.
|
||||
- The Plugins page shows a toggle in the card header. Turning it off dims the card and disables the Run button.
|
||||
- Backend enforcement: Attempts to run an action for a disabled plugin return HTTP 403.
|
||||
|
||||
---
|
||||
|
||||
## Example: Refresh All Sources Plugin
|
||||
|
||||
Path: `data/plugins/refresh_all/plugin.py`
|
||||
|
||||
```
|
||||
class Plugin:
|
||||
name = "Refresh All Sources"
|
||||
version = "1.0.0"
|
||||
description = "Force refresh all M3U accounts and EPG sources."
|
||||
|
||||
fields = [
|
||||
{"id": "confirm", "label": "Require confirmation", "type": "boolean", "default": True,
|
||||
"help_text": "If enabled, the UI should ask before running."}
|
||||
]
|
||||
|
||||
actions = [
|
||||
{"id": "refresh_all", "label": "Refresh All M3Us and EPGs",
|
||||
"description": "Queues background refresh for all active M3U accounts and EPG sources."}
|
||||
]
|
||||
|
||||
def run(self, action: str, params: dict, context: dict):
|
||||
if action == "refresh_all":
|
||||
from apps.m3u.tasks import refresh_m3u_accounts
|
||||
from apps.epg.tasks import refresh_all_epg_data
|
||||
refresh_m3u_accounts.delay()
|
||||
refresh_all_epg_data.delay()
|
||||
return {"status": "queued", "message": "Refresh jobs queued"}
|
||||
return {"status": "error", "message": f"Unknown action: {action}"}
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Best Practices
|
||||
|
||||
- Keep `run` short and schedule heavy operations via Celery tasks.
|
||||
- Validate and sanitize `params` received from the UI (see the sketch after this list).
|
||||
- Use database transactions for bulk or related updates.
|
||||
- Log actionable messages for troubleshooting.
|
||||
- Only write files under `/data` or `/app/data` paths.
|
||||
- Treat plugins as trusted code: they run with full app permissions.
|
||||
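For the parameter-validation point above, one defensive pattern looks like this sketch (the `limit` and `mode` fields are illustrative):

```
# Illustrative validation of params received from the UI
def run(self, action, params, context):
    params = params or {}

    try:
        limit = int(params.get("limit", 5))
    except (TypeError, ValueError):
        return {"status": "error", "message": "limit must be an integer"}
    limit = max(1, min(limit, 100))   # clamp to a sane range

    mode = params.get("mode", "safe")
    if mode not in {"safe", "fast"}:
        return {"status": "error", "message": f"Unknown mode: {mode}"}

    # ... proceed with validated values ...
    return {"status": "ok", "limit": limit, "mode": mode}
```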
|
||||
---
|
||||
|
||||
## Troubleshooting
|
||||
|
||||
- Plugin not listed: ensure the folder exists and contains `plugin.py` with a `Plugin` class.
|
||||
- Import errors: the folder name is the import name; avoid spaces or exotic characters.
|
||||
- No confirmation prompt: include a boolean field with `id: "confirm"` and set it to true (or give it a default of true), or use the `confirm` key on the action.
|
||||
- HTTP 403 on run: the plugin is disabled; enable it from the toggle or via the `enabled/` endpoint.
|
||||
|
||||
---
|
||||
|
||||
## Contributing
|
||||
|
||||
- Keep dependencies minimal. Vendoring small helpers into the plugin folder is acceptable.
|
||||
- Use the existing task and model APIs where possible; propose extensions if you need new capabilities.
|
||||
|
||||
---
|
||||
|
||||
## Internals Reference
|
||||
|
||||
- Loader: `apps/plugins/loader.py`
|
||||
- API Views: `apps/plugins/api_views.py`
|
||||
- API URLs: `apps/plugins/api_urls.py`
|
||||
- Model: `apps/plugins/models.py`
|
||||
- Frontend page: `frontend/src/pages/Plugins.jsx`
|
||||
- Sidebar entry: `frontend/src/components/Sidebar.jsx`
|
||||
|
|
@ -22,6 +22,7 @@ Dispatcharr has officially entered **BETA**, bringing powerful new features and
|
|||
📊 **Real-Time Stats Dashboard** — Live insights into stream health and client activity\
|
||||
🧠 **EPG Auto-Match** — Match program data to channels automatically\
|
||||
⚙️ **Streamlink + FFmpeg Support** — Flexible backend options for streaming and recording\
|
||||
🎬 **VOD Management** — Full Video on Demand support with movies and TV series\
|
||||
🧼 **UI & UX Enhancements** — Smoother, faster, more responsive interface\
|
||||
🛁 **Output Compatibility** — HDHomeRun, M3U, and XMLTV EPG support for Plex, Jellyfin, and more
|
||||
|
||||
|
|
@ -31,6 +32,7 @@ Dispatcharr has officially entered **BETA**, bringing powerful new features and
|
|||
|
||||
✅ **Full IPTV Control** — Import, organize, proxy, and monitor IPTV streams on your own terms\
|
||||
✅ **Smart Playlist Handling** — M3U import, filtering, grouping, and failover support\
|
||||
✅ **VOD Content Management** — Organize movies and TV series with metadata and streaming\
|
||||
✅ **Reliable EPG Integration** — Match and manage TV guide data with ease\
|
||||
✅ **Clean & Responsive Interface** — Modern design that gets out of your way\
|
||||
✅ **Fully Self-Hosted** — Total control, zero reliance on third-party services
|
||||
|
|
@ -104,7 +106,7 @@ Here’s how you can join the party:
|
|||
## 📚 Roadmap & Documentation
|
||||
|
||||
- 📚 **Roadmap:** Coming soon!
|
||||
- 📖 **Wiki:** In progress — tutorials, API references, and advanced setup guides on the way!
|
||||
- 📖 **Documentation:** [Dispatcharr Docs](https://dispatcharr.github.io/Dispatcharr-Docs/)
|
||||
|
||||
---
|
||||
|
||||
|
|
@ -133,4 +135,4 @@ Have a question? Want to suggest a feature? Just want to say hi?\
|
|||
|
||||
---
|
||||
|
||||
### 🚀 *Happy Streaming! The Dispatcharr Team*
|
||||
### 🚀 *Happy Streaming! The Dispatcharr Team*
|
||||
|
|
|
|||
|
|
@ -1,41 +1,39 @@
|
|||
from django.urls import path, include
|
||||
from rest_framework.routers import DefaultRouter
|
||||
from .api_views import (
|
||||
AuthViewSet, UserViewSet, GroupViewSet,
|
||||
list_permissions, initialize_superuser
|
||||
AuthViewSet,
|
||||
UserViewSet,
|
||||
GroupViewSet,
|
||||
TokenObtainPairView,
|
||||
TokenRefreshView,
|
||||
list_permissions,
|
||||
initialize_superuser,
|
||||
)
|
||||
from rest_framework_simplejwt import views as jwt_views
|
||||
|
||||
app_name = 'accounts'
|
||||
app_name = "accounts"
|
||||
|
||||
# 🔹 Register ViewSets with a Router
|
||||
router = DefaultRouter()
|
||||
router.register(r'users', UserViewSet, basename='user')
|
||||
router.register(r'groups', GroupViewSet, basename='group')
|
||||
router.register(r"users", UserViewSet, basename="user")
|
||||
router.register(r"groups", GroupViewSet, basename="group")
|
||||
|
||||
# 🔹 Custom Authentication Endpoints
|
||||
auth_view = AuthViewSet.as_view({
|
||||
'post': 'login'
|
||||
})
|
||||
auth_view = AuthViewSet.as_view({"post": "login"})
|
||||
|
||||
logout_view = AuthViewSet.as_view({
|
||||
'post': 'logout'
|
||||
})
|
||||
logout_view = AuthViewSet.as_view({"post": "logout"})
|
||||
|
||||
# 🔹 Define API URL patterns
|
||||
urlpatterns = [
|
||||
# Authentication
|
||||
path('auth/login/', auth_view, name='user-login'),
|
||||
path('auth/logout/', logout_view, name='user-logout'),
|
||||
|
||||
path("auth/login/", auth_view, name="user-login"),
|
||||
path("auth/logout/", logout_view, name="user-logout"),
|
||||
# Superuser API
|
||||
path('initialize-superuser/', initialize_superuser, name='initialize_superuser'),
|
||||
|
||||
path("initialize-superuser/", initialize_superuser, name="initialize_superuser"),
|
||||
# Permissions API
|
||||
path('permissions/', list_permissions, name='list-permissions'),
|
||||
|
||||
path('token/', jwt_views.TokenObtainPairView.as_view(), name='token_obtain_pair'),
|
||||
path('token/refresh/', jwt_views.TokenRefreshView.as_view(), name='token_refresh'),
|
||||
path("permissions/", list_permissions, name="list-permissions"),
|
||||
path("token/", TokenObtainPairView.as_view(), name="token_obtain_pair"),
|
||||
path("token/refresh/", TokenRefreshView.as_view(), name="token_refresh"),
|
||||
]
|
||||
|
||||
# 🔹 Include ViewSet routes
|
||||
|
|
|
|||
|
|
@ -2,16 +2,110 @@ from django.contrib.auth import authenticate, login, logout
|
|||
from django.contrib.auth.models import Group, Permission
|
||||
from django.http import JsonResponse, HttpResponse
|
||||
from django.views.decorators.csrf import csrf_exempt
|
||||
from rest_framework.decorators import api_view, permission_classes
|
||||
from rest_framework.permissions import IsAuthenticated, AllowAny
|
||||
from rest_framework.decorators import api_view, permission_classes, action
|
||||
from rest_framework.response import Response
|
||||
from rest_framework import viewsets
|
||||
from rest_framework import viewsets, status
|
||||
from drf_yasg.utils import swagger_auto_schema
|
||||
from drf_yasg import openapi
|
||||
import json
|
||||
from .permissions import IsAdmin, Authenticated
|
||||
from dispatcharr.utils import network_access_allowed
|
||||
|
||||
from .models import User
|
||||
from .serializers import UserSerializer, GroupSerializer, PermissionSerializer
|
||||
from rest_framework_simplejwt.views import TokenObtainPairView, TokenRefreshView
|
||||
|
||||
|
||||
class TokenObtainPairView(TokenObtainPairView):
|
||||
def post(self, request, *args, **kwargs):
|
||||
# Custom logic here
|
||||
if not network_access_allowed(request, "UI"):
|
||||
# Log blocked login attempt due to network restrictions
|
||||
from core.utils import log_system_event
|
||||
username = request.data.get("username", 'unknown')
|
||||
client_ip = request.META.get('REMOTE_ADDR', 'unknown')
|
||||
user_agent = request.META.get('HTTP_USER_AGENT', 'unknown')
|
||||
log_system_event(
|
||||
event_type='login_failed',
|
||||
user=username,
|
||||
client_ip=client_ip,
|
||||
user_agent=user_agent,
|
||||
reason='Network access denied',
|
||||
)
|
||||
return Response({"error": "Forbidden"}, status=status.HTTP_403_FORBIDDEN)
|
||||
|
||||
# Get the response from the parent class first
|
||||
username = request.data.get("username")
|
||||
|
||||
# Log login attempt
|
||||
from core.utils import log_system_event
|
||||
client_ip = request.META.get('REMOTE_ADDR', 'unknown')
|
||||
user_agent = request.META.get('HTTP_USER_AGENT', 'unknown')
|
||||
|
||||
try:
|
||||
response = super().post(request, *args, **kwargs)
|
||||
|
||||
# If login was successful, update last_login and log success
|
||||
if response.status_code == 200:
|
||||
if username:
|
||||
from django.utils import timezone
|
||||
try:
|
||||
user = User.objects.get(username=username)
|
||||
user.last_login = timezone.now()
|
||||
user.save(update_fields=['last_login'])
|
||||
|
||||
# Log successful login
|
||||
log_system_event(
|
||||
event_type='login_success',
|
||||
user=username,
|
||||
client_ip=client_ip,
|
||||
user_agent=user_agent,
|
||||
)
|
||||
except User.DoesNotExist:
|
||||
pass # User doesn't exist, but login somehow succeeded
|
||||
else:
|
||||
# Log failed login attempt
|
||||
log_system_event(
|
||||
event_type='login_failed',
|
||||
user=username or 'unknown',
|
||||
client_ip=client_ip,
|
||||
user_agent=user_agent,
|
||||
reason='Invalid credentials',
|
||||
)
|
||||
|
||||
return response
|
||||
|
||||
except Exception as e:
|
||||
# If parent class raises an exception (e.g., validation error), log failed attempt
|
||||
log_system_event(
|
||||
event_type='login_failed',
|
||||
user=username or 'unknown',
|
||||
client_ip=client_ip,
|
||||
user_agent=user_agent,
|
||||
reason=f'Authentication error: {str(e)[:100]}',
|
||||
)
|
||||
raise # Re-raise the exception to maintain normal error flow
|
||||
|
||||
|
||||
class TokenRefreshView(TokenRefreshView):
|
||||
def post(self, request, *args, **kwargs):
|
||||
# Custom logic here
|
||||
if not network_access_allowed(request, "UI"):
|
||||
# Log blocked token refresh attempt due to network restrictions
|
||||
from core.utils import log_system_event
|
||||
client_ip = request.META.get('REMOTE_ADDR', 'unknown')
|
||||
user_agent = request.META.get('HTTP_USER_AGENT', 'unknown')
|
||||
log_system_event(
|
||||
event_type='login_failed',
|
||||
user='token_refresh',
|
||||
client_ip=client_ip,
|
||||
user_agent=user_agent,
|
||||
reason='Network access denied (token refresh)',
|
||||
)
|
||||
return Response({"error": "Unauthorized"}, status=status.HTTP_403_FORBIDDEN)
|
||||
|
||||
return super().post(request, *args, **kwargs)
|
||||
|
||||
|
||||
@csrf_exempt # In production, consider CSRF protection strategies or ensure this endpoint is only accessible when no superuser exists.
|
||||
def initialize_superuser(request):
|
||||
|
|
@ -26,56 +120,114 @@ def initialize_superuser(request):
|
|||
password = data.get("password")
|
||||
email = data.get("email", "")
|
||||
if not username or not password:
|
||||
return JsonResponse({"error": "Username and password are required."}, status=400)
|
||||
return JsonResponse(
|
||||
{"error": "Username and password are required."}, status=400
|
||||
)
|
||||
# Create the superuser
|
||||
User.objects.create_superuser(username=username, password=password, email=email)
|
||||
User.objects.create_superuser(
|
||||
username=username, password=password, email=email, user_level=10
|
||||
)
|
||||
return JsonResponse({"superuser_exists": True})
|
||||
except Exception as e:
|
||||
return JsonResponse({"error": str(e)}, status=500)
|
||||
# For GET requests, indicate no superuser exists
|
||||
return JsonResponse({"superuser_exists": False})
|
||||
|
||||
|
||||
# 🔹 1) Authentication APIs
|
||||
class AuthViewSet(viewsets.ViewSet):
|
||||
"""Handles user login and logout"""
|
||||
|
||||
def get_permissions(self):
|
||||
"""
|
||||
Login doesn't require auth, but logout does
|
||||
"""
|
||||
if self.action == 'logout':
|
||||
from rest_framework.permissions import IsAuthenticated
|
||||
return [IsAuthenticated()]
|
||||
return []
|
||||
|
||||
@swagger_auto_schema(
|
||||
operation_description="Authenticate and log in a user",
|
||||
request_body=openapi.Schema(
|
||||
type=openapi.TYPE_OBJECT,
|
||||
required=['username', 'password'],
|
||||
required=["username", "password"],
|
||||
properties={
|
||||
'username': openapi.Schema(type=openapi.TYPE_STRING),
|
||||
'password': openapi.Schema(type=openapi.TYPE_STRING, format=openapi.FORMAT_PASSWORD)
|
||||
"username": openapi.Schema(type=openapi.TYPE_STRING),
|
||||
"password": openapi.Schema(
|
||||
type=openapi.TYPE_STRING, format=openapi.FORMAT_PASSWORD
|
||||
),
|
||||
},
|
||||
),
|
||||
responses={200: "Login successful", 400: "Invalid credentials"},
|
||||
)
|
||||
def login(self, request):
|
||||
"""Logs in a user and returns user details"""
|
||||
username = request.data.get('username')
|
||||
password = request.data.get('password')
|
||||
username = request.data.get("username")
|
||||
password = request.data.get("password")
|
||||
user = authenticate(request, username=username, password=password)
|
||||
|
||||
# Get client info for logging
|
||||
from core.utils import log_system_event
|
||||
client_ip = request.META.get('REMOTE_ADDR', 'unknown')
|
||||
user_agent = request.META.get('HTTP_USER_AGENT', 'unknown')
|
||||
|
||||
if user:
|
||||
login(request, user)
|
||||
return Response({
|
||||
"message": "Login successful",
|
||||
"user": {
|
||||
"id": user.id,
|
||||
"username": user.username,
|
||||
"email": user.email,
|
||||
"groups": list(user.groups.values_list('name', flat=True))
|
||||
# Update last_login timestamp
|
||||
from django.utils import timezone
|
||||
user.last_login = timezone.now()
|
||||
user.save(update_fields=['last_login'])
|
||||
|
||||
# Log successful login
|
||||
log_system_event(
|
||||
event_type='login_success',
|
||||
user=username,
|
||||
client_ip=client_ip,
|
||||
user_agent=user_agent,
|
||||
)
|
||||
|
||||
return Response(
|
||||
{
|
||||
"message": "Login successful",
|
||||
"user": {
|
||||
"id": user.id,
|
||||
"username": user.username,
|
||||
"email": user.email,
|
||||
"groups": list(user.groups.values_list("name", flat=True)),
|
||||
},
|
||||
}
|
||||
})
|
||||
)
|
||||
|
||||
# Log failed login attempt
|
||||
log_system_event(
|
||||
event_type='login_failed',
|
||||
user=username or 'unknown',
|
||||
client_ip=client_ip,
|
||||
user_agent=user_agent,
|
||||
reason='Invalid credentials',
|
||||
)
|
||||
return Response({"error": "Invalid credentials"}, status=400)
|
||||
|
||||
@swagger_auto_schema(
|
||||
operation_description="Log out the current user",
|
||||
responses={200: "Logout successful"}
|
||||
responses={200: "Logout successful"},
|
||||
)
|
||||
def logout(self, request):
|
||||
"""Logs out the authenticated user"""
|
||||
# Log logout event before actually logging out
|
||||
from core.utils import log_system_event
|
||||
username = request.user.username if request.user and request.user.is_authenticated else 'unknown'
|
||||
client_ip = request.META.get('REMOTE_ADDR', 'unknown')
|
||||
user_agent = request.META.get('HTTP_USER_AGENT', 'unknown')
|
||||
|
||||
log_system_event(
|
||||
event_type='logout',
|
||||
user=username,
|
||||
client_ip=client_ip,
|
||||
user_agent=user_agent,
|
||||
)
|
||||
|
||||
logout(request)
|
||||
return Response({"message": "Logout successful"})
|
||||
|
||||
|
|
@ -83,13 +235,19 @@ class AuthViewSet(viewsets.ViewSet):
|
|||
# 🔹 2) User Management APIs
|
||||
class UserViewSet(viewsets.ModelViewSet):
|
||||
"""Handles CRUD operations for Users"""
|
||||
queryset = User.objects.all()
|
||||
|
||||
queryset = User.objects.all().prefetch_related('channel_profiles')
|
||||
serializer_class = UserSerializer
|
||||
permission_classes = [IsAuthenticated]
|
||||
|
||||
def get_permissions(self):
|
||||
if self.action == "me":
|
||||
return [Authenticated()]
|
||||
|
||||
return [IsAdmin()]
|
||||
|
||||
@swagger_auto_schema(
|
||||
operation_description="Retrieve a list of users",
|
||||
responses={200: UserSerializer(many=True)}
|
||||
responses={200: UserSerializer(many=True)},
|
||||
)
|
||||
def list(self, request, *args, **kwargs):
|
||||
return super().list(request, *args, **kwargs)
|
||||
|
|
@ -110,17 +268,28 @@ class UserViewSet(viewsets.ModelViewSet):
|
|||
def destroy(self, request, *args, **kwargs):
|
||||
return super().destroy(request, *args, **kwargs)
|
||||
|
||||
@swagger_auto_schema(
|
||||
method="get",
|
||||
operation_description="Get active user information",
|
||||
)
|
||||
@action(detail=False, methods=["get"], url_path="me")
|
||||
def me(self, request):
|
||||
user = request.user
|
||||
serializer = UserSerializer(user)
|
||||
return Response(serializer.data)
|
||||
|
||||
|
||||
# 🔹 3) Group Management APIs
|
||||
class GroupViewSet(viewsets.ModelViewSet):
|
||||
"""Handles CRUD operations for Groups"""
|
||||
|
||||
queryset = Group.objects.all()
|
||||
serializer_class = GroupSerializer
|
||||
permission_classes = [IsAuthenticated]
|
||||
permission_classes = [Authenticated]
|
||||
|
||||
@swagger_auto_schema(
|
||||
operation_description="Retrieve a list of groups",
|
||||
responses={200: GroupSerializer(many=True)}
|
||||
responses={200: GroupSerializer(many=True)},
|
||||
)
|
||||
def list(self, request, *args, **kwargs):
|
||||
return super().list(request, *args, **kwargs)
|
||||
|
|
@ -144,12 +313,12 @@ class GroupViewSet(viewsets.ModelViewSet):
|
|||
|
||||
# 🔹 4) Permissions List API
|
||||
@swagger_auto_schema(
|
||||
method='get',
|
||||
method="get",
|
||||
operation_description="Retrieve a list of all permissions",
|
||||
responses={200: PermissionSerializer(many=True)}
|
||||
responses={200: PermissionSerializer(many=True)},
|
||||
)
|
||||
@api_view(['GET'])
|
||||
@permission_classes([IsAuthenticated])
|
||||
@api_view(["GET"])
|
||||
@permission_classes([Authenticated])
|
||||
def list_permissions(request):
|
||||
"""Returns a list of all available permissions"""
|
||||
permissions = Permission.objects.all()
|
||||
|
|
|
|||
|
|
@ -1,6 +1,7 @@
|
|||
from django.apps import AppConfig
|
||||
|
||||
|
||||
class AccountsConfig(AppConfig):
|
||||
default_auto_field = 'django.db.models.BigAutoField'
|
||||
name = 'apps.accounts'
|
||||
default_auto_field = "django.db.models.BigAutoField"
|
||||
name = "apps.accounts"
|
||||
verbose_name = "Accounts & Authentication"
|
||||
|
|
|
|||
|
|
@ -0,0 +1,43 @@
|
|||
# Generated by Django 5.1.6 on 2025-05-18 15:47
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
def set_user_level_to_10(apps, schema_editor):
|
||||
User = apps.get_model("accounts", "User")
|
||||
User.objects.update(user_level=10)
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
("accounts", "0001_initial"),
|
||||
("dispatcharr_channels", "0021_channel_user_level"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.RemoveField(
|
||||
model_name="user",
|
||||
name="channel_groups",
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name="user",
|
||||
name="channel_profiles",
|
||||
field=models.ManyToManyField(
|
||||
blank=True,
|
||||
related_name="users",
|
||||
to="dispatcharr_channels.channelprofile",
|
||||
),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name="user",
|
||||
name="user_level",
|
||||
field=models.IntegerField(default=0),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name="user",
|
||||
name="custom_properties",
|
||||
field=models.TextField(blank=True, null=True),
|
||||
),
|
||||
migrations.RunPython(set_user_level_to_10),
|
||||
]
|
||||
|
|
@ -0,0 +1,18 @@
|
|||
# Generated by Django 5.2.4 on 2025-09-02 14:30
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('accounts', '0002_remove_user_channel_groups_user_channel_profiles_and_more'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterField(
|
||||
model_name='user',
|
||||
name='custom_properties',
|
||||
field=models.JSONField(blank=True, default=dict, null=True),
|
||||
),
|
||||
]
|
||||
|
|
@ -2,17 +2,26 @@
|
|||
from django.db import models
|
||||
from django.contrib.auth.models import AbstractUser, Permission
|
||||
|
||||
|
||||
class User(AbstractUser):
|
||||
"""
|
||||
Custom user model for Dispatcharr.
|
||||
Inherits from Django's AbstractUser to add additional fields if needed.
|
||||
"""
|
||||
|
||||
class UserLevel(models.IntegerChoices):
|
||||
STREAMER = 0, "Streamer"
|
||||
STANDARD = 1, "Standard User"
|
||||
ADMIN = 10, "Admin"
|
||||
|
||||
avatar_config = models.JSONField(default=dict, blank=True, null=True)
|
||||
channel_groups = models.ManyToManyField(
|
||||
'dispatcharr_channels.ChannelGroup', # Updated reference to renamed model
|
||||
channel_profiles = models.ManyToManyField(
|
||||
"dispatcharr_channels.ChannelProfile",
|
||||
blank=True,
|
||||
related_name="users"
|
||||
related_name="users",
|
||||
)
|
||||
user_level = models.IntegerField(default=UserLevel.STREAMER)
|
||||
custom_properties = models.JSONField(default=dict, blank=True, null=True)
|
||||
|
||||
def __str__(self):
|
||||
return self.username
|
||||
|
|
|
|||
apps/accounts/permissions.py (new file, 56 lines)
|
|
@ -0,0 +1,56 @@
|
|||
from rest_framework.permissions import IsAuthenticated
|
||||
from .models import User
|
||||
from dispatcharr.utils import network_access_allowed
|
||||
|
||||
|
||||
class Authenticated(IsAuthenticated):
|
||||
def has_permission(self, request, view):
|
||||
is_authenticated = super().has_permission(request, view)
|
||||
network_allowed = network_access_allowed(request, "UI")
|
||||
|
||||
return is_authenticated and network_allowed
|
||||
|
||||
|
||||
class IsStandardUser(Authenticated):
|
||||
def has_permission(self, request, view):
|
||||
if not super().has_permission(request, view):
|
||||
return False
|
||||
|
||||
return request.user and request.user.user_level >= User.UserLevel.STANDARD
|
||||
|
||||
|
||||
class IsAdmin(Authenticated):
|
||||
def has_permission(self, request, view):
|
||||
if not super().has_permission(request, view):
|
||||
return False
|
||||
|
||||
return request.user.user_level >= 10
|
||||
|
||||
|
||||
class IsOwnerOfObject(Authenticated):
|
||||
def has_object_permission(self, request, view, obj):
|
||||
if not super().has_permission(request, view):
|
||||
return False
|
||||
|
||||
is_admin = IsAdmin().has_permission(request, view)
|
||||
is_owner = request.user in obj.users.all()
|
||||
|
||||
return is_admin or is_owner
|
||||
|
||||
|
||||
permission_classes_by_action = {
|
||||
"list": [IsStandardUser],
|
||||
"create": [IsAdmin],
|
||||
"retrieve": [IsStandardUser],
|
||||
"update": [IsAdmin],
|
||||
"partial_update": [IsAdmin],
|
||||
"destroy": [IsAdmin],
|
||||
}
|
||||
|
||||
permission_classes_by_method = {
|
||||
"GET": [IsStandardUser],
|
||||
"POST": [IsAdmin],
|
||||
"PATCH": [IsAdmin],
|
||||
"PUT": [IsAdmin],
|
||||
"DELETE": [IsAdmin],
|
||||
}
|
||||
|
|
@ -1,13 +1,14 @@
|
|||
from rest_framework import serializers
|
||||
from django.contrib.auth.models import Group, Permission
|
||||
from .models import User
|
||||
from apps.channels.models import ChannelProfile
|
||||
|
||||
|
||||
# 🔹 Fix for Permission serialization
|
||||
class PermissionSerializer(serializers.ModelSerializer):
|
||||
class Meta:
|
||||
model = Permission
|
||||
fields = ['id', 'name', 'codename']
|
||||
fields = ["id", "name", "codename"]
|
||||
|
||||
|
||||
# 🔹 Fix for Group serialization
|
||||
|
|
@ -18,15 +19,61 @@ class GroupSerializer(serializers.ModelSerializer):
|
|||
|
||||
class Meta:
|
||||
model = Group
|
||||
fields = ['id', 'name', 'permissions']
|
||||
fields = ["id", "name", "permissions"]
|
||||
|
||||
|
||||
# 🔹 Fix for User serialization
|
||||
class UserSerializer(serializers.ModelSerializer):
|
||||
groups = serializers.SlugRelatedField(
|
||||
many=True, queryset=Group.objects.all(), slug_field="name"
|
||||
) # ✅ Fix ManyToMany `_meta` error
|
||||
password = serializers.CharField(write_only=True)
|
||||
channel_profiles = serializers.PrimaryKeyRelatedField(
|
||||
queryset=ChannelProfile.objects.all(), many=True, required=False
|
||||
)
|
||||
|
||||
class Meta:
|
||||
model = User
|
||||
fields = ['id', 'username', 'email', 'groups']
|
||||
fields = [
|
||||
"id",
|
||||
"username",
|
||||
"email",
|
||||
"user_level",
|
||||
"password",
|
||||
"channel_profiles",
|
||||
"custom_properties",
|
||||
"avatar_config",
|
||||
"is_active",
|
||||
"is_staff",
|
||||
"is_superuser",
|
||||
"last_login",
|
||||
"date_joined",
|
||||
"first_name",
|
||||
"last_name",
|
||||
]
|
||||
|
||||
def create(self, validated_data):
|
||||
channel_profiles = validated_data.pop("channel_profiles", [])
|
||||
|
||||
user = User(**validated_data)
|
||||
user.set_password(validated_data["password"])
|
||||
user.is_active = True
|
||||
user.save()
|
||||
|
||||
user.channel_profiles.set(channel_profiles)
|
||||
|
||||
return user
|
||||
|
||||
def update(self, instance, validated_data):
|
||||
password = validated_data.pop("password", None)
|
||||
channel_profiles = validated_data.pop("channel_profiles", None)
|
||||
|
||||
for attr, value in validated_data.items():
|
||||
setattr(instance, attr, value)
|
||||
|
||||
if password:
|
||||
instance.set_password(password)
|
||||
|
||||
instance.save()
|
||||
|
||||
if channel_profiles is not None:
|
||||
instance.channel_profiles.set(channel_profiles)
|
||||
|
||||
return instance
|
||||
|
|
|
|||
|
|
@ -5,6 +5,7 @@ from django.db.models.signals import post_save
|
|||
from django.dispatch import receiver
|
||||
from .models import User
|
||||
|
||||
|
||||
@receiver(post_save, sender=User)
|
||||
def handle_new_user(sender, instance, created, **kwargs):
|
||||
if created:
|
||||
|
|
|
|||
|
|
@ -1,11 +1,10 @@
|
|||
from django.urls import path, include
|
||||
from django.urls import path, include, re_path
|
||||
from drf_yasg.views import get_schema_view
|
||||
from drf_yasg import openapi
|
||||
from rest_framework.permissions import AllowAny
|
||||
|
||||
app_name = 'api'
|
||||
|
||||
# Configure Swagger Schema
|
||||
schema_view = get_schema_view(
|
||||
openapi.Info(
|
||||
title="Dispatcharr API",
|
||||
|
|
@ -26,6 +25,9 @@ urlpatterns = [
|
|||
path('hdhr/', include(('apps.hdhr.api_urls', 'hdhr'), namespace='hdhr')),
|
||||
path('m3u/', include(('apps.m3u.api_urls', 'm3u'), namespace='m3u')),
|
||||
path('core/', include(('core.api_urls', 'core'), namespace='core')),
|
||||
path('plugins/', include(('apps.plugins.api_urls', 'plugins'), namespace='plugins')),
|
||||
path('vod/', include(('apps.vod.api_urls', 'vod'), namespace='vod')),
|
||||
path('backups/', include(('apps.backups.api_urls', 'backups'), namespace='backups')),
|
||||
# path('output/', include(('apps.output.api_urls', 'output'), namespace='output')),
|
||||
#path('player/', include(('apps.player.api_urls', 'player'), namespace='player')),
|
||||
#path('settings/', include(('apps.settings.api_urls', 'settings'), namespace='settings')),
|
||||
|
|
@ -34,7 +36,7 @@ urlpatterns = [
|
|||
|
||||
|
||||
# Swagger Documentation api_urls
|
||||
path('swagger/', schema_view.with_ui('swagger', cache_timeout=0), name='schema-swagger-ui'),
|
||||
re_path(r'^swagger/?$', schema_view.with_ui('swagger', cache_timeout=0), name='schema-swagger-ui'),
|
||||
path('redoc/', schema_view.with_ui('redoc', cache_timeout=0), name='schema-redoc'),
|
||||
path('swagger.json', schema_view.without_ui(cache_timeout=0), name='schema-json'),
|
||||
]
|
||||
|
|
|
|||
apps/backups/__init__.py (new file, empty)
apps/backups/api_urls.py (new file, 18 lines)
|
|
@ -0,0 +1,18 @@
|
|||
from django.urls import path

from . import api_views

app_name = "backups"

urlpatterns = [
    path("", api_views.list_backups, name="backup-list"),
    path("create/", api_views.create_backup, name="backup-create"),
    path("upload/", api_views.upload_backup, name="backup-upload"),
    path("schedule/", api_views.get_schedule, name="backup-schedule-get"),
    path("schedule/update/", api_views.update_schedule, name="backup-schedule-update"),
    path("status/<str:task_id>/", api_views.backup_status, name="backup-status"),
    path("<str:filename>/download-token/", api_views.get_download_token, name="backup-download-token"),
    path("<str:filename>/download/", api_views.download_backup, name="backup-download"),
    path("<str:filename>/delete/", api_views.delete_backup, name="backup-delete"),
    path("<str:filename>/restore/", api_views.restore_backup, name="backup-restore"),
]
364  apps/backups/api_views.py  Normal file
@@ -0,0 +1,364 @@
import hashlib
|
||||
import hmac
|
||||
import logging
|
||||
import os
|
||||
from pathlib import Path
|
||||
|
||||
from celery.result import AsyncResult
|
||||
from django.conf import settings
|
||||
from django.http import HttpResponse, StreamingHttpResponse, Http404
|
||||
from rest_framework import status
|
||||
from rest_framework.decorators import api_view, permission_classes, parser_classes
|
||||
from rest_framework.permissions import IsAdminUser, AllowAny
|
||||
from rest_framework.parsers import MultiPartParser, FormParser
|
||||
from rest_framework.response import Response
|
||||
|
||||
from . import services
|
||||
from .tasks import create_backup_task, restore_backup_task
|
||||
from .scheduler import get_schedule_settings, update_schedule_settings
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def _generate_task_token(task_id: str) -> str:
|
||||
"""Generate a signed token for task status access without auth."""
|
||||
secret = settings.SECRET_KEY.encode()
|
||||
return hmac.new(secret, task_id.encode(), hashlib.sha256).hexdigest()[:32]
|
||||
|
||||
|
||||
def _verify_task_token(task_id: str, token: str) -> bool:
|
||||
"""Verify a task token is valid."""
|
||||
expected = _generate_task_token(task_id)
|
||||
return hmac.compare_digest(expected, token)
|
||||
|
||||
|
||||
@api_view(["GET"])
|
||||
@permission_classes([IsAdminUser])
|
||||
def list_backups(request):
|
||||
"""List all available backup files."""
|
||||
try:
|
||||
backups = services.list_backups()
|
||||
return Response(backups, status=status.HTTP_200_OK)
|
||||
except Exception as e:
|
||||
return Response(
|
||||
{"detail": f"Failed to list backups: {str(e)}"},
|
||||
status=status.HTTP_500_INTERNAL_SERVER_ERROR,
|
||||
)
|
||||
|
||||
|
||||
@api_view(["POST"])
|
||||
@permission_classes([IsAdminUser])
|
||||
def create_backup(request):
|
||||
"""Create a new backup (async via Celery)."""
|
||||
try:
|
||||
task = create_backup_task.delay()
|
||||
return Response(
|
||||
{
|
||||
"detail": "Backup started",
|
||||
"task_id": task.id,
|
||||
"task_token": _generate_task_token(task.id),
|
||||
},
|
||||
status=status.HTTP_202_ACCEPTED,
|
||||
)
|
||||
except Exception as e:
|
||||
return Response(
|
||||
{"detail": f"Failed to start backup: {str(e)}"},
|
||||
status=status.HTTP_500_INTERNAL_SERVER_ERROR,
|
||||
)
|
||||
|
||||
|
||||
@api_view(["GET"])
|
||||
@permission_classes([AllowAny])
|
||||
def backup_status(request, task_id):
|
||||
"""Check the status of a backup/restore task.
|
||||
|
||||
Requires either:
|
||||
- Valid admin authentication, OR
|
||||
- Valid task_token query parameter
|
||||
"""
|
||||
# Check for token-based auth (for restore when session is invalidated)
|
||||
token = request.query_params.get("token")
|
||||
if token:
|
||||
if not _verify_task_token(task_id, token):
|
||||
return Response(
|
||||
{"detail": "Invalid task token"},
|
||||
status=status.HTTP_403_FORBIDDEN,
|
||||
)
|
||||
else:
|
||||
# Fall back to admin auth check
|
||||
if not request.user.is_authenticated or not request.user.is_staff:
|
||||
return Response(
|
||||
{"detail": "Authentication required"},
|
||||
status=status.HTTP_401_UNAUTHORIZED,
|
||||
)
|
||||
|
||||
try:
|
||||
result = AsyncResult(task_id)
|
||||
|
||||
if result.ready():
|
||||
task_result = result.get()
|
||||
if task_result.get("status") == "completed":
|
||||
return Response({
|
||||
"state": "completed",
|
||||
"result": task_result,
|
||||
})
|
||||
else:
|
||||
return Response({
|
||||
"state": "failed",
|
||||
"error": task_result.get("error", "Unknown error"),
|
||||
})
|
||||
elif result.failed():
|
||||
return Response({
|
||||
"state": "failed",
|
||||
"error": str(result.result),
|
||||
})
|
||||
else:
|
||||
return Response({
|
||||
"state": result.state.lower(),
|
||||
})
|
||||
except Exception as e:
|
||||
return Response(
|
||||
{"detail": f"Failed to get task status: {str(e)}"},
|
||||
status=status.HTTP_500_INTERNAL_SERVER_ERROR,
|
||||
)
|
||||
|
||||
|
||||
@api_view(["GET"])
|
||||
@permission_classes([IsAdminUser])
|
||||
def get_download_token(request, filename):
|
||||
"""Get a signed token for downloading a backup file."""
|
||||
try:
|
||||
# Security: prevent path traversal
|
||||
if ".." in filename or "/" in filename or "\\" in filename:
|
||||
raise Http404("Invalid filename")
|
||||
|
||||
backup_dir = services.get_backup_dir()
|
||||
backup_file = backup_dir / filename
|
||||
|
||||
if not backup_file.exists():
|
||||
raise Http404("Backup file not found")
|
||||
|
||||
token = _generate_task_token(filename)
|
||||
return Response({"token": token})
|
||||
except Http404:
|
||||
raise
|
||||
except Exception as e:
|
||||
return Response(
|
||||
{"detail": f"Failed to generate token: {str(e)}"},
|
||||
status=status.HTTP_500_INTERNAL_SERVER_ERROR,
|
||||
)
|
||||
|
||||
|
||||
@api_view(["GET"])
|
||||
@permission_classes([AllowAny])
|
||||
def download_backup(request, filename):
|
||||
"""Download a backup file.
|
||||
|
||||
Requires either:
|
||||
- Valid admin authentication, OR
|
||||
- Valid download_token query parameter
|
||||
"""
|
||||
# Check for token-based auth (avoids CORS preflight issues)
|
||||
token = request.query_params.get("token")
|
||||
if token:
|
||||
if not _verify_task_token(filename, token):
|
||||
return Response(
|
||||
{"detail": "Invalid download token"},
|
||||
status=status.HTTP_403_FORBIDDEN,
|
||||
)
|
||||
else:
|
||||
# Fall back to admin auth check
|
||||
if not request.user.is_authenticated or not request.user.is_staff:
|
||||
return Response(
|
||||
{"detail": "Authentication required"},
|
||||
status=status.HTTP_401_UNAUTHORIZED,
|
||||
)
|
||||
|
||||
try:
|
||||
# Security: prevent path traversal by checking for suspicious characters
|
||||
if ".." in filename or "/" in filename or "\\" in filename:
|
||||
raise Http404("Invalid filename")
|
||||
|
||||
backup_dir = services.get_backup_dir()
|
||||
backup_file = (backup_dir / filename).resolve()
|
||||
|
||||
# Security: ensure the resolved path is still within backup_dir
|
||||
if not str(backup_file).startswith(str(backup_dir.resolve())):
|
||||
raise Http404("Invalid filename")
|
||||
|
||||
if not backup_file.exists() or not backup_file.is_file():
|
||||
raise Http404("Backup file not found")
|
||||
|
||||
file_size = backup_file.stat().st_size
|
||||
|
||||
# Use X-Accel-Redirect for nginx (AIO container) - nginx serves file directly
|
||||
# Fall back to streaming for non-nginx deployments
|
||||
use_nginx_accel = os.environ.get("USE_NGINX_ACCEL", "").lower() == "true"
|
||||
logger.info(f"[DOWNLOAD] File: {filename}, Size: {file_size}, USE_NGINX_ACCEL: {use_nginx_accel}")
|
||||
|
||||
if use_nginx_accel:
|
||||
# X-Accel-Redirect: Django returns immediately, nginx serves file
|
||||
logger.info(f"[DOWNLOAD] Using X-Accel-Redirect: /protected-backups/{filename}")
|
||||
response = HttpResponse()
|
||||
response["X-Accel-Redirect"] = f"/protected-backups/{filename}"
|
||||
response["Content-Type"] = "application/zip"
|
||||
response["Content-Length"] = file_size
|
||||
response["Content-Disposition"] = f'attachment; filename="{filename}"'
|
||||
return response
|
||||
else:
|
||||
# Streaming fallback for non-nginx deployments
|
||||
logger.info(f"[DOWNLOAD] Using streaming fallback (no nginx)")
|
||||
def file_iterator(file_path, chunk_size=2 * 1024 * 1024):
|
||||
with open(file_path, "rb") as f:
|
||||
while chunk := f.read(chunk_size):
|
||||
yield chunk
|
||||
|
||||
response = StreamingHttpResponse(
|
||||
file_iterator(backup_file),
|
||||
content_type="application/zip",
|
||||
)
|
||||
response["Content-Length"] = file_size
|
||||
response["Content-Disposition"] = f'attachment; filename="{filename}"'
|
||||
return response
|
||||
except Http404:
|
||||
raise
|
||||
except Exception as e:
|
||||
return Response(
|
||||
{"detail": f"Download failed: {str(e)}"},
|
||||
status=status.HTTP_500_INTERNAL_SERVER_ERROR,
|
||||
)
|
||||
|
||||
|
||||
@api_view(["DELETE"])
|
||||
@permission_classes([IsAdminUser])
|
||||
def delete_backup(request, filename):
|
||||
"""Delete a backup file."""
|
||||
try:
|
||||
# Security: prevent path traversal
|
||||
if ".." in filename or "/" in filename or "\\" in filename:
|
||||
raise Http404("Invalid filename")
|
||||
|
||||
services.delete_backup(filename)
|
||||
return Response(
|
||||
{"detail": "Backup deleted successfully"},
|
||||
status=status.HTTP_204_NO_CONTENT,
|
||||
)
|
||||
except FileNotFoundError:
|
||||
raise Http404("Backup file not found")
|
||||
except Exception as e:
|
||||
return Response(
|
||||
{"detail": f"Delete failed: {str(e)}"},
|
||||
status=status.HTTP_500_INTERNAL_SERVER_ERROR,
|
||||
)
|
||||
|
||||
|
||||
@api_view(["POST"])
|
||||
@permission_classes([IsAdminUser])
|
||||
@parser_classes([MultiPartParser, FormParser])
|
||||
def upload_backup(request):
|
||||
"""Upload a backup file for restoration."""
|
||||
uploaded = request.FILES.get("file")
|
||||
if not uploaded:
|
||||
return Response(
|
||||
{"detail": "No file uploaded"},
|
||||
status=status.HTTP_400_BAD_REQUEST,
|
||||
)
|
||||
|
||||
try:
|
||||
backup_dir = services.get_backup_dir()
|
||||
filename = uploaded.name or "uploaded-backup.zip"
|
||||
|
||||
# Ensure unique filename
|
||||
backup_file = backup_dir / filename
|
||||
counter = 1
|
||||
while backup_file.exists():
|
||||
name_parts = filename.rsplit(".", 1)
|
||||
if len(name_parts) == 2:
|
||||
backup_file = backup_dir / f"{name_parts[0]}-{counter}.{name_parts[1]}"
|
||||
else:
|
||||
backup_file = backup_dir / f"{filename}-{counter}"
|
||||
counter += 1
|
||||
|
||||
# Save uploaded file
|
||||
with backup_file.open("wb") as f:
|
||||
for chunk in uploaded.chunks():
|
||||
f.write(chunk)
|
||||
|
||||
return Response(
|
||||
{
|
||||
"detail": "Backup uploaded successfully",
|
||||
"filename": backup_file.name,
|
||||
},
|
||||
status=status.HTTP_201_CREATED,
|
||||
)
|
||||
except Exception as e:
|
||||
return Response(
|
||||
{"detail": f"Upload failed: {str(e)}"},
|
||||
status=status.HTTP_500_INTERNAL_SERVER_ERROR,
|
||||
)
|
||||
|
||||
|
||||
@api_view(["POST"])
|
||||
@permission_classes([IsAdminUser])
|
||||
def restore_backup(request, filename):
|
||||
"""Restore from a backup file (async via Celery). WARNING: This will flush the database!"""
|
||||
try:
|
||||
# Security: prevent path traversal
|
||||
if ".." in filename or "/" in filename or "\\" in filename:
|
||||
raise Http404("Invalid filename")
|
||||
|
||||
backup_dir = services.get_backup_dir()
|
||||
backup_file = backup_dir / filename
|
||||
|
||||
if not backup_file.exists():
|
||||
raise Http404("Backup file not found")
|
||||
|
||||
task = restore_backup_task.delay(filename)
|
||||
return Response(
|
||||
{
|
||||
"detail": "Restore started",
|
||||
"task_id": task.id,
|
||||
"task_token": _generate_task_token(task.id),
|
||||
},
|
||||
status=status.HTTP_202_ACCEPTED,
|
||||
)
|
||||
except Http404:
|
||||
raise
|
||||
except Exception as e:
|
||||
return Response(
|
||||
{"detail": f"Failed to start restore: {str(e)}"},
|
||||
status=status.HTTP_500_INTERNAL_SERVER_ERROR,
|
||||
)
|
||||
|
||||
|
||||
@api_view(["GET"])
|
||||
@permission_classes([IsAdminUser])
|
||||
def get_schedule(request):
|
||||
"""Get backup schedule settings."""
|
||||
try:
|
||||
settings = get_schedule_settings()
|
||||
return Response(settings)
|
||||
except Exception as e:
|
||||
return Response(
|
||||
{"detail": f"Failed to get schedule: {str(e)}"},
|
||||
status=status.HTTP_500_INTERNAL_SERVER_ERROR,
|
||||
)
|
||||
|
||||
|
||||
@api_view(["PUT"])
|
||||
@permission_classes([IsAdminUser])
|
||||
def update_schedule(request):
|
||||
"""Update backup schedule settings."""
|
||||
try:
|
||||
settings = update_schedule_settings(request.data)
|
||||
return Response(settings)
|
||||
except ValueError as e:
|
||||
return Response(
|
||||
{"detail": str(e)},
|
||||
status=status.HTTP_400_BAD_REQUEST,
|
||||
)
|
||||
except Exception as e:
|
||||
return Response(
|
||||
{"detail": f"Failed to update schedule: {str(e)}"},
|
||||
status=status.HTTP_500_INTERNAL_SERVER_ERROR,
|
||||
)
|
||||
7  apps/backups/apps.py  Normal file
@@ -0,0 +1,7 @@
from django.apps import AppConfig


class BackupsConfig(AppConfig):
    default_auto_field = "django.db.models.BigAutoField"
    name = "apps.backups"
    verbose_name = "Backups"
0  apps/backups/migrations/__init__.py  Normal file
0  apps/backups/models.py  Normal file
202  apps/backups/scheduler.py  Normal file
@@ -0,0 +1,202 @@
import json
|
||||
import logging
|
||||
|
||||
from django_celery_beat.models import PeriodicTask, CrontabSchedule
|
||||
|
||||
from core.models import CoreSettings
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
BACKUP_SCHEDULE_TASK_NAME = "backup-scheduled-task"
|
||||
|
||||
DEFAULTS = {
|
||||
"schedule_enabled": True,
|
||||
"schedule_frequency": "daily",
|
||||
"schedule_time": "03:00",
|
||||
"schedule_day_of_week": 0, # Sunday
|
||||
"retention_count": 3,
|
||||
"schedule_cron_expression": "",
|
||||
}
|
||||
|
||||
|
||||
def _get_backup_settings():
|
||||
"""Get all backup settings from CoreSettings grouped JSON."""
|
||||
try:
|
||||
settings_obj = CoreSettings.objects.get(key="backup_settings")
|
||||
return settings_obj.value if isinstance(settings_obj.value, dict) else DEFAULTS.copy()
|
||||
except CoreSettings.DoesNotExist:
|
||||
return DEFAULTS.copy()
|
||||
|
||||
|
||||
def _update_backup_settings(updates: dict) -> None:
|
||||
"""Update backup settings in the grouped JSON."""
|
||||
obj, created = CoreSettings.objects.get_or_create(
|
||||
key="backup_settings",
|
||||
defaults={"name": "Backup Settings", "value": DEFAULTS.copy()}
|
||||
)
|
||||
current = obj.value if isinstance(obj.value, dict) else {}
|
||||
current.update(updates)
|
||||
obj.value = current
|
||||
obj.save()
|
||||
|
||||
|
||||
def get_schedule_settings() -> dict:
|
||||
"""Get all backup schedule settings."""
|
||||
settings = _get_backup_settings()
|
||||
return {
|
||||
"enabled": bool(settings.get("schedule_enabled", DEFAULTS["schedule_enabled"])),
|
||||
"frequency": str(settings.get("schedule_frequency", DEFAULTS["schedule_frequency"])),
|
||||
"time": str(settings.get("schedule_time", DEFAULTS["schedule_time"])),
|
||||
"day_of_week": int(settings.get("schedule_day_of_week", DEFAULTS["schedule_day_of_week"])),
|
||||
"retention_count": int(settings.get("retention_count", DEFAULTS["retention_count"])),
|
||||
"cron_expression": str(settings.get("schedule_cron_expression", DEFAULTS["schedule_cron_expression"])),
|
||||
}
|
||||
|
||||
|
||||
def update_schedule_settings(data: dict) -> dict:
|
||||
"""Update backup schedule settings and sync the PeriodicTask."""
|
||||
# Validate
|
||||
if "frequency" in data and data["frequency"] not in ("daily", "weekly"):
|
||||
raise ValueError("frequency must be 'daily' or 'weekly'")
|
||||
|
||||
if "time" in data:
|
||||
try:
|
||||
hour, minute = data["time"].split(":")
|
||||
int(hour)
|
||||
int(minute)
|
||||
except (ValueError, AttributeError):
|
||||
raise ValueError("time must be in HH:MM format")
|
||||
|
||||
if "day_of_week" in data:
|
||||
day = int(data["day_of_week"])
|
||||
if day < 0 or day > 6:
|
||||
raise ValueError("day_of_week must be 0-6 (Sunday-Saturday)")
|
||||
|
||||
if "retention_count" in data:
|
||||
count = int(data["retention_count"])
|
||||
if count < 0:
|
||||
raise ValueError("retention_count must be >= 0")
|
||||
|
||||
# Update settings with proper key names
|
||||
updates = {}
|
||||
if "enabled" in data:
|
||||
updates["schedule_enabled"] = bool(data["enabled"])
|
||||
if "frequency" in data:
|
||||
updates["schedule_frequency"] = str(data["frequency"])
|
||||
if "time" in data:
|
||||
updates["schedule_time"] = str(data["time"])
|
||||
if "day_of_week" in data:
|
||||
updates["schedule_day_of_week"] = int(data["day_of_week"])
|
||||
if "retention_count" in data:
|
||||
updates["retention_count"] = int(data["retention_count"])
|
||||
if "cron_expression" in data:
|
||||
updates["schedule_cron_expression"] = str(data["cron_expression"])
|
||||
|
||||
_update_backup_settings(updates)
|
||||
|
||||
# Sync the periodic task
|
||||
_sync_periodic_task()
|
||||
|
||||
return get_schedule_settings()
|
||||
|
||||
|
||||
def _sync_periodic_task() -> None:
|
||||
"""Create, update, or delete the scheduled backup task based on settings."""
|
||||
settings = get_schedule_settings()
|
||||
|
||||
if not settings["enabled"]:
|
||||
# Delete the task if it exists
|
||||
task = PeriodicTask.objects.filter(name=BACKUP_SCHEDULE_TASK_NAME).first()
|
||||
if task:
|
||||
old_crontab = task.crontab
|
||||
task.delete()
|
||||
_cleanup_orphaned_crontab(old_crontab)
|
||||
logger.info("Backup schedule disabled, removed periodic task")
|
||||
return
|
||||
|
||||
# Get old crontab before creating new one
|
||||
old_crontab = None
|
||||
try:
|
||||
old_task = PeriodicTask.objects.get(name=BACKUP_SCHEDULE_TASK_NAME)
|
||||
old_crontab = old_task.crontab
|
||||
except PeriodicTask.DoesNotExist:
|
||||
pass
|
||||
|
||||
# Check if using cron expression (advanced mode)
|
||||
if settings["cron_expression"]:
|
||||
# Parse cron expression: "minute hour day month weekday"
|
||||
try:
|
||||
parts = settings["cron_expression"].split()
|
||||
if len(parts) != 5:
|
||||
raise ValueError("Cron expression must have 5 parts: minute hour day month weekday")
|
||||
|
||||
minute, hour, day_of_month, month_of_year, day_of_week = parts
|
||||
|
||||
crontab, _ = CrontabSchedule.objects.get_or_create(
|
||||
minute=minute,
|
||||
hour=hour,
|
||||
day_of_week=day_of_week,
|
||||
day_of_month=day_of_month,
|
||||
month_of_year=month_of_year,
|
||||
timezone=CoreSettings.get_system_time_zone(),
|
||||
)
|
||||
except Exception as e:
|
||||
logger.error(f"Invalid cron expression '{settings['cron_expression']}': {e}")
|
||||
raise ValueError(f"Invalid cron expression: {e}")
|
||||
else:
|
||||
# Use simple frequency-based scheduling
|
||||
# Parse time
|
||||
hour, minute = settings["time"].split(":")
|
||||
|
||||
# Build crontab based on frequency
|
||||
system_tz = CoreSettings.get_system_time_zone()
|
||||
if settings["frequency"] == "daily":
|
||||
crontab, _ = CrontabSchedule.objects.get_or_create(
|
||||
minute=minute,
|
||||
hour=hour,
|
||||
day_of_week="*",
|
||||
day_of_month="*",
|
||||
month_of_year="*",
|
||||
timezone=system_tz,
|
||||
)
|
||||
else: # weekly
|
||||
crontab, _ = CrontabSchedule.objects.get_or_create(
|
||||
minute=minute,
|
||||
hour=hour,
|
||||
day_of_week=str(settings["day_of_week"]),
|
||||
day_of_month="*",
|
||||
month_of_year="*",
|
||||
timezone=system_tz,
|
||||
)
|
||||
|
||||
# Create or update the periodic task
|
||||
task, created = PeriodicTask.objects.update_or_create(
|
||||
name=BACKUP_SCHEDULE_TASK_NAME,
|
||||
defaults={
|
||||
"task": "apps.backups.tasks.scheduled_backup_task",
|
||||
"crontab": crontab,
|
||||
"enabled": True,
|
||||
"kwargs": json.dumps({"retention_count": settings["retention_count"]}),
|
||||
},
|
||||
)
|
||||
|
||||
# Clean up old crontab if it changed and is orphaned
|
||||
if old_crontab and old_crontab.id != crontab.id:
|
||||
_cleanup_orphaned_crontab(old_crontab)
|
||||
|
||||
action = "Created" if created else "Updated"
|
||||
logger.info(f"{action} backup schedule: {settings['frequency']} at {settings['time']}")
|
||||
|
||||
|
||||
def _cleanup_orphaned_crontab(crontab_schedule):
|
||||
"""Delete old CrontabSchedule if no other tasks are using it."""
|
||||
if crontab_schedule is None:
|
||||
return
|
||||
|
||||
# Check if any other tasks are using this crontab
|
||||
if PeriodicTask.objects.filter(crontab=crontab_schedule).exists():
|
||||
logger.debug(f"CrontabSchedule {crontab_schedule.id} still in use, not deleting")
|
||||
return
|
||||
|
||||
logger.debug(f"Cleaning up orphaned CrontabSchedule: {crontab_schedule.id}")
|
||||
crontab_schedule.delete()
|
||||
350  apps/backups/services.py  Normal file
@@ -0,0 +1,350 @@
import datetime
|
||||
import json
|
||||
import os
|
||||
import shutil
|
||||
import subprocess
|
||||
import tempfile
|
||||
from pathlib import Path
|
||||
from zipfile import ZipFile, ZIP_DEFLATED
|
||||
import logging
|
||||
import pytz
|
||||
|
||||
from django.conf import settings
|
||||
from core.models import CoreSettings
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def get_backup_dir() -> Path:
|
||||
"""Get the backup directory, creating it if necessary."""
|
||||
backup_dir = Path(settings.BACKUP_ROOT)
|
||||
backup_dir.mkdir(parents=True, exist_ok=True)
|
||||
return backup_dir
|
||||
|
||||
|
||||
def _is_postgresql() -> bool:
|
||||
"""Check if we're using PostgreSQL."""
|
||||
return settings.DATABASES["default"]["ENGINE"] == "django.db.backends.postgresql"
|
||||
|
||||
|
||||
def _get_pg_env() -> dict:
|
||||
"""Get environment variables for PostgreSQL commands."""
|
||||
db_config = settings.DATABASES["default"]
|
||||
env = os.environ.copy()
|
||||
env["PGPASSWORD"] = db_config.get("PASSWORD", "")
|
||||
return env
|
||||
|
||||
|
||||
def _get_pg_args() -> list[str]:
|
||||
"""Get common PostgreSQL command arguments."""
|
||||
db_config = settings.DATABASES["default"]
|
||||
return [
|
||||
"-h", db_config.get("HOST", "localhost"),
|
||||
"-p", str(db_config.get("PORT", 5432)),
|
||||
"-U", db_config.get("USER", "postgres"),
|
||||
"-d", db_config.get("NAME", "dispatcharr"),
|
||||
]
|
||||
|
||||
|
||||
def _dump_postgresql(output_file: Path) -> None:
|
||||
"""Dump PostgreSQL database using pg_dump."""
|
||||
logger.info("Dumping PostgreSQL database with pg_dump...")
|
||||
|
||||
cmd = [
|
||||
"pg_dump",
|
||||
*_get_pg_args(),
|
||||
"-Fc", # Custom format for pg_restore
|
||||
"-v", # Verbose
|
||||
"-f", str(output_file),
|
||||
]
|
||||
|
||||
result = subprocess.run(
|
||||
cmd,
|
||||
env=_get_pg_env(),
|
||||
capture_output=True,
|
||||
text=True,
|
||||
)
|
||||
|
||||
if result.returncode != 0:
|
||||
logger.error(f"pg_dump failed: {result.stderr}")
|
||||
raise RuntimeError(f"pg_dump failed: {result.stderr}")
|
||||
|
||||
logger.debug(f"pg_dump output: {result.stderr}")
|
||||
|
||||
|
||||
def _clean_postgresql_schema() -> None:
|
||||
"""Drop and recreate the public schema to ensure a completely clean restore."""
|
||||
logger.info("[PG_CLEAN] Dropping and recreating public schema...")
|
||||
|
||||
# Commands to drop and recreate schema
|
||||
sql_commands = "DROP SCHEMA IF EXISTS public CASCADE; CREATE SCHEMA public; GRANT ALL ON SCHEMA public TO public;"
|
||||
|
||||
cmd = [
|
||||
"psql",
|
||||
*_get_pg_args(),
|
||||
"-c", sql_commands,
|
||||
]
|
||||
|
||||
result = subprocess.run(
|
||||
cmd,
|
||||
env=_get_pg_env(),
|
||||
capture_output=True,
|
||||
text=True,
|
||||
)
|
||||
|
||||
if result.returncode != 0:
|
||||
logger.error(f"[PG_CLEAN] Failed to clean schema: {result.stderr}")
|
||||
raise RuntimeError(f"Failed to clean PostgreSQL schema: {result.stderr}")
|
||||
|
||||
logger.info("[PG_CLEAN] Schema cleaned successfully")
|
||||
|
||||
|
||||
def _restore_postgresql(dump_file: Path) -> None:
|
||||
"""Restore PostgreSQL database using pg_restore."""
|
||||
logger.info("[PG_RESTORE] Starting pg_restore...")
|
||||
logger.info(f"[PG_RESTORE] Dump file: {dump_file}")
|
||||
|
||||
# Drop and recreate schema to ensure a completely clean restore
|
||||
_clean_postgresql_schema()
|
||||
|
||||
pg_args = _get_pg_args()
|
||||
logger.info(f"[PG_RESTORE] Connection args: {pg_args}")
|
||||
|
||||
cmd = [
|
||||
"pg_restore",
|
||||
"--no-owner", # Skip ownership commands (we already created schema)
|
||||
*pg_args,
|
||||
"-v", # Verbose
|
||||
str(dump_file),
|
||||
]
|
||||
|
||||
logger.info(f"[PG_RESTORE] Running command: {' '.join(cmd)}")
|
||||
|
||||
result = subprocess.run(
|
||||
cmd,
|
||||
env=_get_pg_env(),
|
||||
capture_output=True,
|
||||
text=True,
|
||||
)
|
||||
|
||||
logger.info(f"[PG_RESTORE] Return code: {result.returncode}")
|
||||
|
||||
# pg_restore may return non-zero even on partial success
|
||||
# Check for actual errors vs warnings
|
||||
if result.returncode != 0:
|
||||
# Some errors during restore are expected (e.g., "does not exist" when cleaning)
|
||||
# Only fail on critical errors
|
||||
stderr = result.stderr.lower()
|
||||
if "fatal" in stderr or "could not connect" in stderr:
|
||||
logger.error(f"[PG_RESTORE] Failed critically: {result.stderr}")
|
||||
raise RuntimeError(f"pg_restore failed: {result.stderr}")
|
||||
else:
|
||||
logger.warning(f"[PG_RESTORE] Completed with warnings: {result.stderr[:500]}...")
|
||||
|
||||
logger.info("[PG_RESTORE] Completed successfully")
|
||||
|
||||
|
||||
def _dump_sqlite(output_file: Path) -> None:
|
||||
"""Dump SQLite database using sqlite3 .backup command."""
|
||||
logger.info("Dumping SQLite database with sqlite3 .backup...")
|
||||
db_path = Path(settings.DATABASES["default"]["NAME"])
|
||||
|
||||
if not db_path.exists():
|
||||
raise FileNotFoundError(f"SQLite database not found: {db_path}")
|
||||
|
||||
# Use sqlite3 .backup command via stdin for reliable execution
|
||||
result = subprocess.run(
|
||||
["sqlite3", str(db_path)],
|
||||
input=f".backup '{output_file}'\n",
|
||||
capture_output=True,
|
||||
text=True,
|
||||
)
|
||||
|
||||
if result.returncode != 0:
|
||||
logger.error(f"sqlite3 backup failed: {result.stderr}")
|
||||
raise RuntimeError(f"sqlite3 backup failed: {result.stderr}")
|
||||
|
||||
# Verify the backup file was created
|
||||
if not output_file.exists():
|
||||
raise RuntimeError("sqlite3 backup failed: output file not created")
|
||||
|
||||
logger.info(f"sqlite3 backup completed successfully: {output_file}")
|
||||
|
||||
|
||||
def _restore_sqlite(dump_file: Path) -> None:
|
||||
"""Restore SQLite database by replacing the database file."""
|
||||
logger.info("Restoring SQLite database...")
|
||||
db_path = Path(settings.DATABASES["default"]["NAME"])
|
||||
backup_current = None
|
||||
|
||||
# Backup current database before overwriting
|
||||
if db_path.exists():
|
||||
backup_current = db_path.with_suffix(".db.bak")
|
||||
shutil.copy2(db_path, backup_current)
|
||||
logger.info(f"Backed up current database to {backup_current}")
|
||||
|
||||
# Ensure parent directory exists
|
||||
db_path.parent.mkdir(parents=True, exist_ok=True)
|
||||
|
||||
# The backup file from _dump_sqlite is a complete SQLite database file
|
||||
# We can simply copy it over the existing database
|
||||
shutil.copy2(dump_file, db_path)
|
||||
|
||||
# Verify the restore worked by checking if sqlite3 can read it
|
||||
result = subprocess.run(
|
||||
["sqlite3", str(db_path)],
|
||||
input=".tables\n",
|
||||
capture_output=True,
|
||||
text=True,
|
||||
)
|
||||
|
||||
if result.returncode != 0:
|
||||
logger.error(f"sqlite3 verification failed: {result.stderr}")
|
||||
# Try to restore from backup
|
||||
if backup_current and backup_current.exists():
|
||||
shutil.copy2(backup_current, db_path)
|
||||
logger.info("Restored original database from backup")
|
||||
raise RuntimeError(f"sqlite3 restore verification failed: {result.stderr}")
|
||||
|
||||
logger.info("sqlite3 restore completed successfully")
|
||||
|
||||
|
||||
def create_backup() -> Path:
|
||||
"""
|
||||
Create a backup archive containing database dump and data directories.
|
||||
Returns the path to the created backup file.
|
||||
"""
|
||||
backup_dir = get_backup_dir()
|
||||
|
||||
# Use system timezone for filename (user-friendly), but keep internal timestamps as UTC
|
||||
system_tz_name = CoreSettings.get_system_time_zone()
|
||||
try:
|
||||
system_tz = pytz.timezone(system_tz_name)
|
||||
now_local = datetime.datetime.now(datetime.UTC).astimezone(system_tz)
|
||||
timestamp = now_local.strftime("%Y.%m.%d.%H.%M.%S")
|
||||
except Exception as e:
|
||||
logger.warning(f"Failed to use system timezone {system_tz_name}: {e}, falling back to UTC")
|
||||
timestamp = datetime.datetime.now(datetime.UTC).strftime("%Y.%m.%d.%H.%M.%S")
|
||||
|
||||
backup_name = f"dispatcharr-backup-{timestamp}.zip"
|
||||
backup_file = backup_dir / backup_name
|
||||
|
||||
logger.info(f"Creating backup: {backup_name}")
|
||||
|
||||
with tempfile.TemporaryDirectory(prefix="dispatcharr-backup-") as temp_dir:
|
||||
temp_path = Path(temp_dir)
|
||||
|
||||
# Determine database type and dump accordingly
|
||||
if _is_postgresql():
|
||||
db_dump_file = temp_path / "database.dump"
|
||||
_dump_postgresql(db_dump_file)
|
||||
db_type = "postgresql"
|
||||
else:
|
||||
db_dump_file = temp_path / "database.sqlite3"
|
||||
_dump_sqlite(db_dump_file)
|
||||
db_type = "sqlite"
|
||||
|
||||
# Create ZIP archive with compression and ZIP64 support for large files
|
||||
with ZipFile(backup_file, "w", compression=ZIP_DEFLATED, allowZip64=True) as zip_file:
|
||||
# Add database dump
|
||||
zip_file.write(db_dump_file, db_dump_file.name)
|
||||
|
||||
# Add metadata
|
||||
metadata = {
|
||||
"format": "dispatcharr-backup",
|
||||
"version": 2,
|
||||
"database_type": db_type,
|
||||
"database_file": db_dump_file.name,
|
||||
"created_at": datetime.datetime.now(datetime.UTC).isoformat(),
|
||||
}
|
||||
zip_file.writestr("metadata.json", json.dumps(metadata, indent=2))
|
||||
|
||||
logger.info(f"Backup created successfully: {backup_file}")
|
||||
return backup_file
|
||||
|
||||
|
||||
def restore_backup(backup_file: Path) -> None:
|
||||
"""
|
||||
Restore from a backup archive.
|
||||
WARNING: This will overwrite the database!
|
||||
"""
|
||||
if not backup_file.exists():
|
||||
raise FileNotFoundError(f"Backup file not found: {backup_file}")
|
||||
|
||||
logger.info(f"Restoring from backup: {backup_file}")
|
||||
|
||||
with tempfile.TemporaryDirectory(prefix="dispatcharr-restore-") as temp_dir:
|
||||
temp_path = Path(temp_dir)
|
||||
|
||||
# Extract backup
|
||||
logger.debug("Extracting backup archive...")
|
||||
with ZipFile(backup_file, "r") as zip_file:
|
||||
zip_file.extractall(temp_path)
|
||||
|
||||
# Read metadata
|
||||
metadata_file = temp_path / "metadata.json"
|
||||
if not metadata_file.exists():
|
||||
raise ValueError("Invalid backup: missing metadata.json")
|
||||
|
||||
with open(metadata_file) as f:
|
||||
metadata = json.load(f)
|
||||
|
||||
# Restore database
|
||||
_restore_database(temp_path, metadata)
|
||||
|
||||
logger.info("Restore completed successfully")
|
||||
|
||||
|
||||
def _restore_database(temp_path: Path, metadata: dict) -> None:
|
||||
"""Restore database from backup."""
|
||||
db_type = metadata.get("database_type", "postgresql")
|
||||
db_file = metadata.get("database_file", "database.dump")
|
||||
dump_file = temp_path / db_file
|
||||
|
||||
if not dump_file.exists():
|
||||
raise ValueError(f"Invalid backup: missing {db_file}")
|
||||
|
||||
current_db_type = "postgresql" if _is_postgresql() else "sqlite"
|
||||
|
||||
if db_type != current_db_type:
|
||||
raise ValueError(
|
||||
f"Database type mismatch: backup is {db_type}, "
|
||||
f"but current database is {current_db_type}"
|
||||
)
|
||||
|
||||
if db_type == "postgresql":
|
||||
_restore_postgresql(dump_file)
|
||||
else:
|
||||
_restore_sqlite(dump_file)
|
||||
|
||||
|
||||
def list_backups() -> list[dict]:
|
||||
"""List all available backup files with metadata."""
|
||||
backup_dir = get_backup_dir()
|
||||
backups = []
|
||||
|
||||
for backup_file in sorted(backup_dir.glob("dispatcharr-backup-*.zip"), reverse=True):
|
||||
# Use UTC timezone so frontend can convert to user's local time
|
||||
created_time = datetime.datetime.fromtimestamp(backup_file.stat().st_mtime, datetime.UTC)
|
||||
backups.append({
|
||||
"name": backup_file.name,
|
||||
"size": backup_file.stat().st_size,
|
||||
"created": created_time.isoformat(),
|
||||
})
|
||||
|
||||
return backups
|
||||
|
||||
|
||||
def delete_backup(filename: str) -> None:
|
||||
"""Delete a backup file."""
|
||||
backup_dir = get_backup_dir()
|
||||
backup_file = backup_dir / filename
|
||||
|
||||
if not backup_file.exists():
|
||||
raise FileNotFoundError(f"Backup file not found: {filename}")
|
||||
|
||||
if not backup_file.is_file():
|
||||
raise ValueError(f"Invalid backup file: {filename}")
|
||||
|
||||
backup_file.unlink()
|
||||
logger.info(f"Deleted backup: {filename}")
|
||||
106  apps/backups/tasks.py  Normal file
@@ -0,0 +1,106 @@
import logging
|
||||
import traceback
|
||||
from celery import shared_task
|
||||
|
||||
from . import services
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def _cleanup_old_backups(retention_count: int) -> int:
|
||||
"""Delete old backups, keeping only the most recent N. Returns count deleted."""
|
||||
if retention_count <= 0:
|
||||
return 0
|
||||
|
||||
backups = services.list_backups()
|
||||
if len(backups) <= retention_count:
|
||||
return 0
|
||||
|
||||
# Backups are sorted newest first, so delete from the end
|
||||
to_delete = backups[retention_count:]
|
||||
deleted = 0
|
||||
|
||||
for backup in to_delete:
|
||||
try:
|
||||
services.delete_backup(backup["name"])
|
||||
deleted += 1
|
||||
logger.info(f"[CLEANUP] Deleted old backup: {backup['name']}")
|
||||
except Exception as e:
|
||||
logger.error(f"[CLEANUP] Failed to delete {backup['name']}: {e}")
|
||||
|
||||
return deleted
|
||||
|
||||
|
||||
@shared_task(bind=True)
|
||||
def create_backup_task(self):
|
||||
"""Celery task to create a backup asynchronously."""
|
||||
try:
|
||||
logger.info(f"[BACKUP] Starting backup task {self.request.id}")
|
||||
backup_file = services.create_backup()
|
||||
logger.info(f"[BACKUP] Task {self.request.id} completed: {backup_file.name}")
|
||||
return {
|
||||
"status": "completed",
|
||||
"filename": backup_file.name,
|
||||
"size": backup_file.stat().st_size,
|
||||
}
|
||||
except Exception as e:
|
||||
logger.error(f"[BACKUP] Task {self.request.id} failed: {str(e)}")
|
||||
logger.error(f"[BACKUP] Traceback: {traceback.format_exc()}")
|
||||
return {
|
||||
"status": "failed",
|
||||
"error": str(e),
|
||||
}
|
||||
|
||||
|
||||
@shared_task(bind=True)
|
||||
def restore_backup_task(self, filename: str):
|
||||
"""Celery task to restore a backup asynchronously."""
|
||||
try:
|
||||
logger.info(f"[RESTORE] Starting restore task {self.request.id} for {filename}")
|
||||
backup_dir = services.get_backup_dir()
|
||||
backup_file = backup_dir / filename
|
||||
logger.info(f"[RESTORE] Backup file path: {backup_file}")
|
||||
services.restore_backup(backup_file)
|
||||
logger.info(f"[RESTORE] Task {self.request.id} completed successfully")
|
||||
return {
|
||||
"status": "completed",
|
||||
"filename": filename,
|
||||
}
|
||||
except Exception as e:
|
||||
logger.error(f"[RESTORE] Task {self.request.id} failed: {str(e)}")
|
||||
logger.error(f"[RESTORE] Traceback: {traceback.format_exc()}")
|
||||
return {
|
||||
"status": "failed",
|
||||
"error": str(e),
|
||||
}
|
||||
|
||||
|
||||
@shared_task(bind=True)
|
||||
def scheduled_backup_task(self, retention_count: int = 0):
|
||||
"""Celery task for scheduled backups with optional retention cleanup."""
|
||||
try:
|
||||
logger.info(f"[SCHEDULED] Starting scheduled backup task {self.request.id}")
|
||||
|
||||
# Create backup
|
||||
backup_file = services.create_backup()
|
||||
logger.info(f"[SCHEDULED] Backup created: {backup_file.name}")
|
||||
|
||||
# Cleanup old backups if retention is set
|
||||
deleted = 0
|
||||
if retention_count > 0:
|
||||
deleted = _cleanup_old_backups(retention_count)
|
||||
logger.info(f"[SCHEDULED] Cleanup complete, deleted {deleted} old backup(s)")
|
||||
|
||||
return {
|
||||
"status": "completed",
|
||||
"filename": backup_file.name,
|
||||
"size": backup_file.stat().st_size,
|
||||
"deleted_count": deleted,
|
||||
}
|
||||
except Exception as e:
|
||||
logger.error(f"[SCHEDULED] Task {self.request.id} failed: {str(e)}")
|
||||
logger.error(f"[SCHEDULED] Traceback: {traceback.format_exc()}")
|
||||
return {
|
||||
"status": "failed",
|
||||
"error": str(e),
|
||||
}
|
||||
1163  apps/backups/tests.py  Normal file
File diff suppressed because it is too large
@@ -6,12 +6,21 @@ from .api_views import (
ChannelGroupViewSet,
|
||||
BulkDeleteStreamsAPIView,
|
||||
BulkDeleteChannelsAPIView,
|
||||
BulkDeleteLogosAPIView,
|
||||
CleanupUnusedLogosAPIView,
|
||||
LogoViewSet,
|
||||
ChannelProfileViewSet,
|
||||
UpdateChannelMembershipAPIView,
|
||||
BulkUpdateChannelMembershipAPIView,
|
||||
RecordingViewSet,
|
||||
RecurringRecordingRuleViewSet,
|
||||
GetChannelStreamsAPIView,
|
||||
SeriesRulesAPIView,
|
||||
DeleteSeriesRuleAPIView,
|
||||
EvaluateSeriesRulesAPIView,
|
||||
BulkRemoveSeriesRecordingsAPIView,
|
||||
BulkDeleteUpcomingRecordingsAPIView,
|
||||
ComskipConfigAPIView,
|
||||
)
|
||||
|
||||
app_name = 'channels' # for DRF routing
|
||||
@@ -23,14 +32,24 @@ router.register(r'channels', ChannelViewSet, basename='channel')
router.register(r'logos', LogoViewSet, basename='logo')
|
||||
router.register(r'profiles', ChannelProfileViewSet, basename='profile')
|
||||
router.register(r'recordings', RecordingViewSet, basename='recording')
|
||||
router.register(r'recurring-rules', RecurringRecordingRuleViewSet, basename='recurring-rule')
|
||||
|
||||
urlpatterns = [
|
||||
# Bulk delete is a single APIView, not a ViewSet
|
||||
path('streams/bulk-delete/', BulkDeleteStreamsAPIView.as_view(), name='bulk_delete_streams'),
|
||||
path('channels/bulk-delete/', BulkDeleteChannelsAPIView.as_view(), name='bulk_delete_channels'),
|
||||
path('logos/bulk-delete/', BulkDeleteLogosAPIView.as_view(), name='bulk_delete_logos'),
|
||||
path('logos/cleanup/', CleanupUnusedLogosAPIView.as_view(), name='cleanup_unused_logos'),
|
||||
path('channels/<int:channel_id>/streams/', GetChannelStreamsAPIView.as_view(), name='get_channel_streams'),
|
||||
path('profiles/<int:profile_id>/channels/<int:channel_id>/', UpdateChannelMembershipAPIView.as_view(), name='update_channel_membership'),
|
||||
path('profiles/<int:profile_id>/channels/bulk-update/', BulkUpdateChannelMembershipAPIView.as_view(), name='bulk_update_channel_membership'),
|
||||
# DVR series rules (order matters: specific routes before catch-all slug)
|
||||
path('series-rules/', SeriesRulesAPIView.as_view(), name='series_rules'),
|
||||
path('series-rules/evaluate/', EvaluateSeriesRulesAPIView.as_view(), name='evaluate_series_rules'),
|
||||
path('series-rules/bulk-remove/', BulkRemoveSeriesRecordingsAPIView.as_view(), name='bulk_remove_series_recordings'),
|
||||
path('series-rules/<path:tvg_id>/', DeleteSeriesRuleAPIView.as_view(), name='delete_series_rule'),
|
||||
path('recordings/bulk-delete-upcoming/', BulkDeleteUpcomingRecordingsAPIView.as_view(), name='bulk_delete_upcoming_recordings'),
|
||||
path('dvr/comskip-config/', ComskipConfigAPIView.as_view(), name='comskip_config'),
|
||||
]
|
||||
|
||||
urlpatterns += router.urls
|
||||
File diff suppressed because it is too large
@@ -14,6 +14,13 @@ class ChannelGroupForm(forms.ModelForm):
# Channel Form
|
||||
#
|
||||
class ChannelForm(forms.ModelForm):
|
||||
# Explicitly define channel_number as FloatField to ensure decimal values work
|
||||
channel_number = forms.FloatField(
|
||||
required=False,
|
||||
widget=forms.NumberInput(attrs={'step': '0.1'}), # Allow decimal steps
|
||||
help_text="Channel number can include decimals (e.g., 1.1, 2.5)"
|
||||
)
|
||||
|
||||
channel_group = forms.ModelChoiceField(
|
||||
queryset=ChannelGroup.objects.all(),
|
||||
required=False,
|
||||
@@ -0,0 +1,18 @@
# Generated by Django 5.1.6 on 2025-04-27 14:12
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('dispatcharr_channels', '0017_alter_channelgroup_name'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name='channelgroupm3uaccount',
|
||||
name='custom_properties',
|
||||
field=models.TextField(blank=True, null=True),
|
||||
),
|
||||
]
|
||||
18  apps/channels/migrations/0019_channel_tvc_guide_stationid.py  Normal file
@@ -0,0 +1,18 @@
# Generated by Django 5.1.6 on 2025-05-04 00:02
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('dispatcharr_channels', '0018_channelgroupm3uaccount_custom_properties_and_more'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name='channel',
|
||||
name='tvc_guide_stationid',
|
||||
field=models.CharField(blank=True, max_length=255, null=True),
|
||||
),
|
||||
]
|
@@ -0,0 +1,18 @@
# Generated by Django 5.1.6 on 2025-05-15 19:37
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('dispatcharr_channels', '0019_channel_tvc_guide_stationid'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterField(
|
||||
model_name='channel',
|
||||
name='channel_number',
|
||||
field=models.FloatField(db_index=True),
|
||||
),
|
||||
]
|
||||
18  apps/channels/migrations/0021_channel_user_level.py  Normal file
@@ -0,0 +1,18 @@
# Generated by Django 5.1.6 on 2025-05-18 14:31
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('dispatcharr_channels', '0020_alter_channel_channel_number'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name='channel',
|
||||
name='user_level',
|
||||
field=models.IntegerField(default=0),
|
||||
),
|
||||
]
|
@@ -0,0 +1,35 @@
# Generated by Django 5.1.6 on 2025-07-13 23:08
|
||||
|
||||
import django.db.models.deletion
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('dispatcharr_channels', '0021_channel_user_level'),
|
||||
('m3u', '0012_alter_m3uaccount_refresh_interval'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name='channel',
|
||||
name='auto_created',
|
||||
field=models.BooleanField(default=False, help_text='Whether this channel was automatically created via M3U auto channel sync'),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='channel',
|
||||
name='auto_created_by',
|
||||
field=models.ForeignKey(blank=True, help_text='The M3U account that auto-created this channel', null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='auto_created_channels', to='m3u.m3uaccount'),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='channelgroupm3uaccount',
|
||||
name='auto_channel_sync',
|
||||
field=models.BooleanField(default=False, help_text='Automatically create/delete channels to match streams in this group'),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='channelgroupm3uaccount',
|
||||
name='auto_sync_channel_start',
|
||||
field=models.FloatField(blank=True, help_text='Starting channel number for auto-created channels in this group', null=True),
|
||||
),
|
||||
]
|
@@ -0,0 +1,23 @@
# Generated by Django 5.1.6 on 2025-07-29 02:39
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('dispatcharr_channels', '0022_channel_auto_created_channel_auto_created_by_and_more'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name='stream',
|
||||
name='stream_stats',
|
||||
field=models.JSONField(blank=True, help_text='JSON object containing stream statistics like video codec, resolution, etc.', null=True),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='stream',
|
||||
name='stream_stats_updated_at',
|
||||
field=models.DateTimeField(blank=True, db_index=True, help_text='When stream statistics were last updated', null=True),
|
||||
),
|
||||
]
|
@@ -0,0 +1,19 @@
# Generated by Django 5.2.4 on 2025-08-22 20:14
|
||||
|
||||
import django.db.models.deletion
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('dispatcharr_channels', '0023_stream_stream_stats_stream_stream_stats_updated_at'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterField(
|
||||
model_name='channelgroupm3uaccount',
|
||||
name='channel_group',
|
||||
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='m3u_accounts', to='dispatcharr_channels.channelgroup'),
|
||||
),
|
||||
]
|
@@ -0,0 +1,28 @@
# Generated by Django 5.2.4 on 2025-09-02 14:30
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('dispatcharr_channels', '0024_alter_channelgroupm3uaccount_channel_group'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterField(
|
||||
model_name='channelgroupm3uaccount',
|
||||
name='custom_properties',
|
||||
field=models.JSONField(blank=True, default=dict, null=True),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='recording',
|
||||
name='custom_properties',
|
||||
field=models.JSONField(blank=True, default=dict, null=True),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='stream',
|
||||
name='custom_properties',
|
||||
field=models.JSONField(blank=True, default=dict, null=True),
|
||||
),
|
||||
]
|
||||
31  apps/channels/migrations/0026_recurringrecordingrule.py  Normal file
@@ -0,0 +1,31 @@
# Generated by Django 5.0.14 on 2025-09-18 14:56
|
||||
|
||||
import django.db.models.deletion
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('dispatcharr_channels', '0025_alter_channelgroupm3uaccount_custom_properties_and_more'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.CreateModel(
|
||||
name='RecurringRecordingRule',
|
||||
fields=[
|
||||
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
|
||||
('days_of_week', models.JSONField(default=list)),
|
||||
('start_time', models.TimeField()),
|
||||
('end_time', models.TimeField()),
|
||||
('enabled', models.BooleanField(default=True)),
|
||||
('name', models.CharField(blank=True, max_length=255)),
|
||||
('created_at', models.DateTimeField(auto_now_add=True)),
|
||||
('updated_at', models.DateTimeField(auto_now=True)),
|
||||
('channel', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='recurring_rules', to='dispatcharr_channels.channel')),
|
||||
],
|
||||
options={
|
||||
'ordering': ['channel', 'start_time'],
|
||||
},
|
||||
),
|
||||
]
|
@@ -0,0 +1,23 @@
# Generated by Django 5.2.4 on 2025-10-05 20:50
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('dispatcharr_channels', '0026_recurringrecordingrule'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name='recurringrecordingrule',
|
||||
name='end_date',
|
||||
field=models.DateField(blank=True, null=True),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='recurringrecordingrule',
|
||||
name='start_date',
|
||||
field=models.DateField(blank=True, null=True),
|
||||
),
|
||||
]
|
@@ -0,0 +1,25 @@
# Generated by Django 5.2.4 on 2025-10-06 22:55
|
||||
|
||||
import django.utils.timezone
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('dispatcharr_channels', '0027_recurringrecordingrule_end_date_and_more'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name='channel',
|
||||
name='created_at',
|
||||
field=models.DateTimeField(auto_now_add=True, default=django.utils.timezone.now, help_text='Timestamp when this channel was created'),
|
||||
preserve_default=False,
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='channel',
|
||||
name='updated_at',
|
||||
field=models.DateTimeField(auto_now=True, help_text='Timestamp when this channel was last updated'),
|
||||
),
|
||||
]
|
@@ -0,0 +1,54 @@
# Generated migration to backfill stream_hash for existing custom streams
|
||||
|
||||
from django.db import migrations
|
||||
import hashlib
|
||||
|
||||
|
||||
def backfill_custom_stream_hashes(apps, schema_editor):
|
||||
"""
|
||||
Generate stream_hash for all custom streams that don't have one.
|
||||
Uses stream ID to create a stable hash that won't change when name/url is edited.
|
||||
"""
|
||||
Stream = apps.get_model('dispatcharr_channels', 'Stream')
|
||||
|
||||
custom_streams_without_hash = Stream.objects.filter(
|
||||
is_custom=True,
|
||||
stream_hash__isnull=True
|
||||
)
|
||||
|
||||
updated_count = 0
|
||||
for stream in custom_streams_without_hash:
|
||||
# Generate a stable hash using the stream's ID
|
||||
# This ensures the hash never changes even if name/url is edited
|
||||
unique_string = f"custom_stream_{stream.id}"
|
||||
stream.stream_hash = hashlib.sha256(unique_string.encode()).hexdigest()
|
||||
stream.save(update_fields=['stream_hash'])
|
||||
updated_count += 1
|
||||
|
||||
if updated_count > 0:
|
||||
print(f"Backfilled stream_hash for {updated_count} custom streams")
|
||||
else:
|
||||
print("No custom streams needed stream_hash backfill")
|
||||
|
||||
|
||||
def reverse_backfill(apps, schema_editor):
|
||||
"""
|
||||
Reverse migration - clear stream_hash for custom streams.
|
||||
Note: This will break preview functionality for custom streams.
|
||||
"""
|
||||
Stream = apps.get_model('dispatcharr_channels', 'Stream')
|
||||
|
||||
custom_streams = Stream.objects.filter(is_custom=True)
|
||||
count = custom_streams.update(stream_hash=None)
|
||||
print(f"Cleared stream_hash for {count} custom streams")
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('dispatcharr_channels', '0028_channel_created_at_channel_updated_at'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.RunPython(backfill_custom_stream_hashes, reverse_backfill),
|
||||
]
|
||||
18  apps/channels/migrations/0030_alter_stream_url.py  Normal file
@@ -0,0 +1,18 @@
# Generated by Django 5.2.4 on 2025-10-28 20:00
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('dispatcharr_channels', '0029_backfill_custom_stream_hashes'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterField(
|
||||
model_name='stream',
|
||||
name='url',
|
||||
field=models.URLField(blank=True, max_length=4096, null=True),
|
||||
),
|
||||
]
|
@@ -0,0 +1,29 @@
# Generated by Django 5.2.9 on 2026-01-09 18:19
|
||||
|
||||
import datetime
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('dispatcharr_channels', '0030_alter_stream_url'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name='channelgroupm3uaccount',
|
||||
name='is_stale',
|
||||
field=models.BooleanField(db_index=True, default=False, help_text='Whether this group relationship is stale (not seen in recent refresh, pending deletion)'),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='channelgroupm3uaccount',
|
||||
name='last_seen',
|
||||
field=models.DateTimeField(db_index=True, default=datetime.datetime.now, help_text='Last time this group was seen in the M3U source during a refresh'),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='stream',
|
||||
name='is_stale',
|
||||
field=models.BooleanField(db_index=True, default=False, help_text='Whether this stream is stale (not seen in recent refresh, pending deletion)'),
|
||||
),
|
||||
]
|
@@ -1,6 +1,5 @@
from django.db import models
|
||||
from django.core.exceptions import ValidationError
|
||||
from core.models import StreamProfile
|
||||
from django.conf import settings
|
||||
from core.models import StreamProfile, CoreSettings
|
||||
from core.utils import RedisClient
|
||||
@@ -10,12 +9,14 @@ from datetime import datetime
import hashlib
|
||||
import json
|
||||
from apps.epg.models import EPGData
|
||||
from apps.accounts.models import User
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
# If you have an M3UAccount model in apps.m3u, you can still import it:
|
||||
from apps.m3u.models import M3UAccount
|
||||
|
||||
|
||||
# Add fallback functions if Redis isn't available
|
||||
def get_total_viewers(channel_id):
|
||||
"""Get viewer count from Redis or return 0 if Redis isn't available"""
|
||||
@@ -26,6 +27,7 @@ def get_total_viewers(channel_id):
except Exception:
|
||||
return 0
|
||||
|
||||
|
||||
class ChannelGroup(models.Model):
|
||||
name = models.TextField(unique=True, db_index=True)
|
||||
|
||||
@@ -46,12 +48,14 @@ class ChannelGroup(models.Model):
|
||||
return created_objects
|
||||
|
||||
|
||||
class Stream(models.Model):
|
||||
"""
|
||||
Represents a single stream (e.g. from an M3U source or custom URL).
|
||||
"""
|
||||
|
||||
name = models.CharField(max_length=255, default="Default Stream")
|
||||
url = models.URLField(max_length=2000, blank=True, null=True)
|
||||
url = models.URLField(max_length=4096, blank=True, null=True)
|
||||
m3u_account = models.ForeignKey(
|
||||
M3UAccount,
|
||||
on_delete=models.CASCADE,
|
||||
@@ -61,7 +65,7 @@ class Stream(models.Model):
)
|
||||
logo_url = models.TextField(blank=True, null=True)
|
||||
tvg_id = models.CharField(max_length=255, blank=True, null=True)
|
||||
local_file = models.FileField(upload_to='uploads/', blank=True, null=True)
|
||||
local_file = models.FileField(upload_to="uploads/", blank=True, null=True)
|
||||
current_viewers = models.PositiveIntegerField(default=0)
|
||||
updated_at = models.DateTimeField(auto_now=True)
|
||||
channel_group = models.ForeignKey(
|
||||
@@ -69,18 +73,18 @@ class Stream(models.Model):
on_delete=models.SET_NULL,
|
||||
null=True,
|
||||
blank=True,
|
||||
related_name='streams'
|
||||
related_name="streams",
|
||||
)
|
||||
stream_profile = models.ForeignKey(
|
||||
StreamProfile,
|
||||
null=True,
|
||||
blank=True,
|
||||
on_delete=models.SET_NULL,
|
||||
related_name='streams'
|
||||
related_name="streams",
|
||||
)
|
||||
is_custom = models.BooleanField(
|
||||
default=False,
|
||||
help_text="Whether this is a user-created stream or from an M3U account"
|
||||
help_text="Whether this is a user-created stream or from an M3U account",
|
||||
)
|
||||
stream_hash = models.CharField(
|
||||
max_length=255,
|
||||
@@ -90,30 +94,48 @@ class Stream(models.Model):
db_index=True,
|
||||
)
|
||||
last_seen = models.DateTimeField(db_index=True, default=datetime.now)
|
||||
custom_properties = models.TextField(null=True, blank=True)
|
||||
is_stale = models.BooleanField(
|
||||
default=False,
|
||||
db_index=True,
|
||||
help_text="Whether this stream is stale (not seen in recent refresh, pending deletion)"
|
||||
)
|
||||
custom_properties = models.JSONField(default=dict, blank=True, null=True)
|
||||
|
||||
# Stream statistics fields
|
||||
stream_stats = models.JSONField(
|
||||
null=True,
|
||||
blank=True,
|
||||
help_text="JSON object containing stream statistics like video codec, resolution, etc."
|
||||
)
|
||||
stream_stats_updated_at = models.DateTimeField(
|
||||
null=True,
|
||||
blank=True,
|
||||
help_text="When stream statistics were last updated",
|
||||
db_index=True
|
||||
)
|
||||
|
||||
class Meta:
|
||||
# If you use m3u_account, you might do unique_together = ('name','url','m3u_account')
|
||||
verbose_name = "Stream"
|
||||
verbose_name_plural = "Streams"
|
||||
ordering = ['-updated_at']
|
||||
ordering = ["-updated_at"]
|
||||
|
||||
def __str__(self):
|
||||
return self.name or self.url or f"Stream ID {self.id}"
|
||||
|
||||
@classmethod
|
||||
def generate_hash_key(cls, name, url, tvg_id, keys=None):
|
||||
def generate_hash_key(cls, name, url, tvg_id, keys=None, m3u_id=None, group=None):
|
||||
if keys is None:
|
||||
keys = CoreSettings.get_m3u_hash_key().split(",")
|
||||
|
||||
stream_parts = {
|
||||
"name": name, "url": url, "tvg_id": tvg_id
|
||||
}
|
||||
stream_parts = {"name": name, "url": url, "tvg_id": tvg_id, "m3u_id": m3u_id, "group": group}
|
||||
|
||||
hash_parts = {key: stream_parts[key] for key in keys if key in stream_parts}
|
||||
|
||||
# Serialize and hash the dictionary
|
||||
serialized_obj = json.dumps(hash_parts, sort_keys=True) # sort_keys ensures consistent ordering
|
||||
serialized_obj = json.dumps(
|
||||
hash_parts, sort_keys=True
|
||||
) # sort_keys ensures consistent ordering
|
||||
hash_object = hashlib.sha256(serialized_obj.encode())
|
||||
return hash_object.hexdigest()
|
||||
|
||||
|
|
@@ -129,13 +151,23 @@ class Stream(models.Model):
|
|||
return stream, False # False means it was updated, not created
|
||||
except cls.DoesNotExist:
|
||||
# If it doesn't exist, create a new object with the given hash
|
||||
fields_to_update['stream_hash'] = hash_value # Make sure the hash field is set
|
||||
fields_to_update["stream_hash"] = (
|
||||
hash_value # Make sure the hash field is set
|
||||
)
|
||||
stream = cls.objects.create(**fields_to_update)
|
||||
return stream, True # True means it was created
|
||||
|
||||
# @TODO: honor stream's stream profile
|
||||
def get_stream_profile(self):
|
||||
stream_profile = StreamProfile.objects.get(id=CoreSettings.get_default_stream_profile_id())
|
||||
"""
|
||||
Get the stream profile for this stream.
|
||||
Uses the stream's own profile if set, otherwise returns the default.
|
||||
"""
|
||||
if self.stream_profile:
|
||||
return self.stream_profile
|
||||
|
||||
stream_profile = StreamProfile.objects.get(
|
||||
id=CoreSettings.get_default_stream_profile_id()
|
||||
)
|
||||
|
||||
return stream_profile
|
||||
|
||||
|
|
@@ -153,7 +185,9 @@ class Stream(models.Model):
|
|||
m3u_account = self.m3u_account
|
||||
m3u_profiles = m3u_account.profiles.all()
|
||||
default_profile = next((obj for obj in m3u_profiles if obj.is_default), None)
|
||||
profiles = [default_profile] + [obj for obj in m3u_profiles if not obj.is_default]
|
||||
profiles = [default_profile] + [
|
||||
obj for obj in m3u_profiles if not obj.is_default
|
||||
]
|
||||
|
||||
for profile in profiles:
|
||||
logger.info(profile)
|
||||
|
|
@@ -168,13 +202,19 @@ class Stream(models.Model):
|
|||
if profile.max_streams == 0 or current_connections < profile.max_streams:
|
||||
# Start a new stream
|
||||
redis_client.set(f"channel_stream:{self.id}", self.id)
|
||||
redis_client.set(f"stream_profile:{self.id}", profile.id) # Store only the matched profile
|
||||
redis_client.set(
|
||||
f"stream_profile:{self.id}", profile.id
|
||||
) # Store only the matched profile
|
||||
|
||||
# Increment connection count for profiles with limits
|
||||
if profile.max_streams > 0:
|
||||
redis_client.incr(profile_connections_key)
|
||||
|
||||
return self.id, profile.id, None # Return newly assigned stream and matched profile
|
||||
return (
|
||||
self.id,
|
||||
profile.id,
|
||||
None,
|
||||
) # Return newly assigned stream and matched profile
|
||||
|
||||
# 4. No available streams
|
||||
return None, None, None
|
||||
|
|
@@ -195,7 +235,9 @@ class Stream(models.Model):
|
|||
redis_client.delete(f"stream_profile:{stream_id}") # Remove profile association
|
||||
|
||||
profile_id = int(profile_id)
|
||||
logger.debug(f"Found profile ID {profile_id} associated with stream {stream_id}")
|
||||
logger.debug(
|
||||
f"Found profile ID {profile_id} associated with stream {stream_id}"
|
||||
)
|
||||
|
||||
profile_connections_key = f"profile_connections:{profile_id}"
|
||||
|
||||
|
|
@@ -204,45 +246,45 @@ class Stream(models.Model):
|
|||
if current_count > 0:
|
||||
redis_client.decr(profile_connections_key)
|
||||
|
||||
|
||||
class ChannelManager(models.Manager):
|
||||
def active(self):
|
||||
return self.all()
|
||||
|
||||
|
||||
class Channel(models.Model):
|
||||
channel_number = models.IntegerField(db_index=True)
|
||||
channel_number = models.FloatField(db_index=True)
|
||||
name = models.CharField(max_length=255)
|
||||
logo = models.ForeignKey(
|
||||
'Logo',
|
||||
"Logo",
|
||||
on_delete=models.SET_NULL,
|
||||
null=True,
|
||||
blank=True,
|
||||
related_name='channels',
|
||||
related_name="channels",
|
||||
)
|
||||
|
||||
# M2M to Stream now in the same file
|
||||
streams = models.ManyToManyField(
|
||||
Stream,
|
||||
blank=True,
|
||||
through='ChannelStream',
|
||||
related_name='channels'
|
||||
Stream, blank=True, through="ChannelStream", related_name="channels"
|
||||
)
|
||||
|
||||
channel_group = models.ForeignKey(
|
||||
'ChannelGroup',
|
||||
"ChannelGroup",
|
||||
on_delete=models.SET_NULL,
|
||||
null=True,
|
||||
blank=True,
|
||||
related_name='channels',
|
||||
help_text="Channel group this channel belongs to."
|
||||
related_name="channels",
|
||||
help_text="Channel group this channel belongs to.",
|
||||
)
|
||||
tvg_id = models.CharField(max_length=255, blank=True, null=True)
|
||||
tvc_guide_stationid = models.CharField(max_length=255, blank=True, null=True)
|
||||
|
||||
epg_data = models.ForeignKey(
|
||||
EPGData,
|
||||
on_delete=models.SET_NULL,
|
||||
null=True,
|
||||
blank=True,
|
||||
related_name='channels'
|
||||
related_name="channels",
|
||||
)
|
||||
|
||||
stream_profile = models.ForeignKey(
|
||||
|
|
@@ -250,16 +292,41 @@ class Channel(models.Model):
|
|||
on_delete=models.SET_NULL,
|
||||
null=True,
|
||||
blank=True,
|
||||
related_name='channels'
|
||||
related_name="channels",
|
||||
)
|
||||
|
||||
uuid = models.UUIDField(default=uuid.uuid4, editable=False, unique=True, db_index=True)
|
||||
uuid = models.UUIDField(
|
||||
default=uuid.uuid4, editable=False, unique=True, db_index=True
|
||||
)
|
||||
|
||||
user_level = models.IntegerField(default=0)
|
||||
|
||||
auto_created = models.BooleanField(
|
||||
default=False,
|
||||
help_text="Whether this channel was automatically created via M3U auto channel sync"
|
||||
)
|
||||
auto_created_by = models.ForeignKey(
|
||||
"m3u.M3UAccount",
|
||||
on_delete=models.SET_NULL,
|
||||
null=True,
|
||||
blank=True,
|
||||
related_name="auto_created_channels",
|
||||
help_text="The M3U account that auto-created this channel"
|
||||
)
|
||||
|
||||
created_at = models.DateTimeField(
|
||||
auto_now_add=True,
|
||||
help_text="Timestamp when this channel was created"
|
||||
)
|
||||
updated_at = models.DateTimeField(
|
||||
auto_now=True,
|
||||
help_text="Timestamp when this channel was last updated"
|
||||
)
|
||||
|
||||
def clean(self):
|
||||
# Enforce unique channel_number within a given group
|
||||
existing = Channel.objects.filter(
|
||||
channel_number=self.channel_number,
|
||||
channel_group=self.channel_group
|
||||
channel_number=self.channel_number, channel_group=self.channel_group
|
||||
).exclude(id=self.id)
|
||||
if existing.exists():
|
||||
raise ValidationError(
|
||||
|
|
@@ -271,7 +338,7 @@ class Channel(models.Model):
|
|||
|
||||
@classmethod
|
||||
def get_next_available_channel_number(cls, starting_from=1):
|
||||
used_numbers = set(cls.objects.all().values_list('channel_number', flat=True))
|
||||
used_numbers = set(cls.objects.all().values_list("channel_number", flat=True))
|
||||
n = starting_from
|
||||
while n in used_numbers:
|
||||
n += 1
|
||||
|
|
@@ -281,7 +348,9 @@ class Channel(models.Model):
|
|||
def get_stream_profile(self):
|
||||
stream_profile = self.stream_profile
|
||||
if not stream_profile:
|
||||
stream_profile = StreamProfile.objects.get(id=CoreSettings.get_default_stream_profile_id())
|
||||
stream_profile = StreamProfile.objects.get(
|
||||
id=CoreSettings.get_default_stream_profile_id()
|
||||
)
|
||||
|
||||
return stream_profile
|
||||
|
||||
|
|
@@ -311,44 +380,55 @@ class Channel(models.Model):
|
|||
profile_id = int(profile_id_bytes)
|
||||
return stream_id, profile_id, None
|
||||
except (ValueError, TypeError):
|
||||
logger.debug(f"Invalid profile ID retrieved from Redis: {profile_id_bytes}")
|
||||
logger.debug(
|
||||
f"Invalid profile ID retrieved from Redis: {profile_id_bytes}"
|
||||
)
|
||||
except (ValueError, TypeError):
|
||||
logger.debug(f"Invalid stream ID retrieved from Redis: {stream_id_bytes}")
|
||||
logger.debug(
|
||||
f"Invalid stream ID retrieved from Redis: {stream_id_bytes}"
|
||||
)
|
||||
|
||||
# No existing active stream, attempt to assign a new one
|
||||
has_streams_but_maxed_out = False
|
||||
has_active_profiles = False
|
||||
|
||||
# Iterate through channel streams and their profiles
|
||||
for stream in self.streams.all().order_by('channelstream__order'):
|
||||
for stream in self.streams.all().order_by("channelstream__order"):
|
||||
# Retrieve the M3U account associated with the stream.
|
||||
m3u_account = stream.m3u_account
|
||||
if not m3u_account:
|
||||
logger.debug(f"Stream {stream.id} has no M3U account")
|
||||
continue
|
||||
|
||||
m3u_profiles = m3u_account.profiles.all()
|
||||
default_profile = next((obj for obj in m3u_profiles if obj.is_default), None)
|
||||
|
||||
if not default_profile:
|
||||
logger.debug(f"M3U account {m3u_account.id} has no default profile")
|
||||
if m3u_account.is_active == False:
|
||||
logger.debug(f"M3U account {m3u_account.id} is inactive, skipping.")
|
||||
continue
|
||||
|
||||
profiles = [default_profile] + [obj for obj in m3u_profiles if not obj.is_default]
|
||||
m3u_profiles = m3u_account.profiles.filter(is_active=True)
|
||||
default_profile = next(
|
||||
(obj for obj in m3u_profiles if obj.is_default), None
|
||||
)
|
||||
|
||||
if not default_profile:
|
||||
logger.debug(f"M3U account {m3u_account.id} has no active default profile")
|
||||
continue
|
||||
|
||||
profiles = [default_profile] + [
|
||||
obj for obj in m3u_profiles if not obj.is_default
|
||||
]
|
||||
|
||||
for profile in profiles:
|
||||
# Skip inactive profiles
|
||||
if not profile.is_active:
|
||||
logger.debug(f"Skipping inactive profile {profile.id}")
|
||||
continue
|
||||
|
||||
has_active_profiles = True
|
||||
|
||||
profile_connections_key = f"profile_connections:{profile.id}"
|
||||
current_connections = int(redis_client.get(profile_connections_key) or 0)
|
||||
current_connections = int(
|
||||
redis_client.get(profile_connections_key) or 0
|
||||
)
|
||||
|
||||
# Check if profile has available slots (or unlimited connections)
|
||||
if profile.max_streams == 0 or current_connections < profile.max_streams:
|
||||
if (
|
||||
profile.max_streams == 0
|
||||
or current_connections < profile.max_streams
|
||||
):
|
||||
# Start a new stream
|
||||
redis_client.set(f"channel_stream:{self.id}", stream.id)
|
||||
redis_client.set(f"stream_profile:{stream.id}", profile.id)
|
||||
|
|
@@ -357,17 +437,23 @@ class Channel(models.Model):
|
|||
if profile.max_streams > 0:
|
||||
redis_client.incr(profile_connections_key)
|
||||
|
||||
return stream.id, profile.id, None # Return newly assigned stream and matched profile
|
||||
return (
|
||||
stream.id,
|
||||
profile.id,
|
||||
None,
|
||||
) # Return newly assigned stream and matched profile
|
||||
else:
|
||||
# This profile is at max connections
|
||||
has_streams_but_maxed_out = True
|
||||
logger.debug(f"Profile {profile.id} at max connections: {current_connections}/{profile.max_streams}")
|
||||
logger.debug(
|
||||
f"Profile {profile.id} at max connections: {current_connections}/{profile.max_streams}"
|
||||
)
|
||||
|
||||
# No available streams - determine specific reason
|
||||
if has_streams_but_maxed_out:
|
||||
error_reason = "All M3U profiles have reached maximum connection limits"
|
||||
error_reason = "All active M3U profiles have reached maximum connection limits"
|
||||
elif has_active_profiles:
|
||||
error_reason = "No compatible profile found for any assigned stream"
|
||||
error_reason = "No compatible active profile found for any assigned stream"
|
||||
else:
|
||||
error_reason = "No active profiles found for any assigned stream"
|
||||
|
||||
|
|
@@ -387,7 +473,9 @@ class Channel(models.Model):
|
|||
redis_client.delete(f"channel_stream:{self.id}") # Remove active stream
|
||||
|
||||
stream_id = int(stream_id)
|
||||
logger.debug(f"Found stream ID {stream_id} associated with channel stream {self.id}")
|
||||
logger.debug(
|
||||
f"Found stream ID {stream_id} associated with channel stream {self.id}"
|
||||
)
|
||||
|
||||
# Get the matched profile for cleanup
|
||||
profile_id = redis_client.get(f"stream_profile:{stream_id}")
|
||||
|
|
@@ -398,7 +486,9 @@ class Channel(models.Model):
|
|||
redis_client.delete(f"stream_profile:{stream_id}") # Remove profile association
|
||||
|
||||
profile_id = int(profile_id)
|
||||
logger.debug(f"Found profile ID {profile_id} associated with stream {stream_id}")
|
||||
logger.debug(
|
||||
f"Found profile ID {profile_id} associated with stream {stream_id}"
|
||||
)
|
||||
|
||||
profile_connections_key = f"profile_connections:{profile_id}"
|
||||
|
||||
|
|
@@ -451,20 +541,26 @@ class Channel(models.Model):
|
|||
# Increment connection count for new profile
|
||||
new_profile_connections_key = f"profile_connections:{new_profile_id}"
|
||||
redis_client.incr(new_profile_connections_key)
|
||||
logger.info(f"Updated stream {stream_id} profile from {current_profile_id} to {new_profile_id}")
|
||||
logger.info(
|
||||
f"Updated stream {stream_id} profile from {current_profile_id} to {new_profile_id}"
|
||||
)
|
||||
return True
|
||||
|
||||
|
||||
class ChannelProfile(models.Model):
|
||||
name = models.CharField(max_length=100, unique=True)
|
||||
|
||||
|
||||
class ChannelProfileMembership(models.Model):
|
||||
channel_profile = models.ForeignKey(ChannelProfile, on_delete=models.CASCADE)
|
||||
channel = models.ForeignKey(Channel, on_delete=models.CASCADE)
|
||||
enabled = models.BooleanField(default=True) # Track if the channel is enabled for this group
|
||||
enabled = models.BooleanField(
|
||||
default=True
|
||||
) # Track if the channel is enabled for this group
|
||||
|
||||
class Meta:
|
||||
unique_together = ('channel_profile', 'channel')
|
||||
unique_together = ("channel_profile", "channel")
|
||||
|
||||
|
||||
class ChannelStream(models.Model):
|
||||
channel = models.ForeignKey(Channel, on_delete=models.CASCADE)
|
||||
|
|
@@ -472,26 +568,45 @@ class ChannelStream(models.Model):
|
|||
order = models.PositiveIntegerField(default=0) # Ordering field
|
||||
|
||||
class Meta:
|
||||
ordering = ['order'] # Ensure streams are retrieved in order
|
||||
ordering = ["order"] # Ensure streams are retrieved in order
|
||||
constraints = [
|
||||
models.UniqueConstraint(fields=['channel', 'stream'], name='unique_channel_stream')
|
||||
models.UniqueConstraint(
|
||||
fields=["channel", "stream"], name="unique_channel_stream"
|
||||
)
|
||||
]
|
||||
|
||||
|
||||
class ChannelGroupM3UAccount(models.Model):
|
||||
channel_group = models.ForeignKey(
|
||||
ChannelGroup,
|
||||
on_delete=models.CASCADE,
|
||||
related_name='m3u_account'
|
||||
ChannelGroup, on_delete=models.CASCADE, related_name="m3u_accounts"
|
||||
)
|
||||
m3u_account = models.ForeignKey(
|
||||
M3UAccount,
|
||||
on_delete=models.CASCADE,
|
||||
related_name='channel_group'
|
||||
M3UAccount, on_delete=models.CASCADE, related_name="channel_group"
|
||||
)
|
||||
custom_properties = models.JSONField(default=dict, blank=True, null=True)
|
||||
enabled = models.BooleanField(default=True)
|
||||
auto_channel_sync = models.BooleanField(
|
||||
default=False,
|
||||
help_text='Automatically create/delete channels to match streams in this group'
|
||||
)
|
||||
auto_sync_channel_start = models.FloatField(
|
||||
null=True,
|
||||
blank=True,
|
||||
help_text='Starting channel number for auto-created channels in this group'
|
||||
)
|
||||
last_seen = models.DateTimeField(
|
||||
default=datetime.now,
|
||||
db_index=True,
|
||||
help_text='Last time this group was seen in the M3U source during a refresh'
|
||||
)
|
||||
is_stale = models.BooleanField(
|
||||
default=False,
|
||||
db_index=True,
|
||||
help_text='Whether this group relationship is stale (not seen in recent refresh, pending deletion)'
|
||||
)
|
||||
|
||||
class Meta:
|
||||
unique_together = ('channel_group', 'm3u_account')
|
||||
unique_together = ("channel_group", "m3u_account")
|
||||
|
||||
def __str__(self):
|
||||
return f"{self.channel_group.name} - {self.m3u_account.name} (Enabled: {self.enabled})"
|
||||
|
|
@@ -504,12 +619,47 @@ class Logo(models.Model):
|
|||
def __str__(self):
|
||||
return self.name
|
||||
|
||||
|
||||
class Recording(models.Model):
|
||||
channel = models.ForeignKey("Channel", on_delete=models.CASCADE, related_name="recordings")
|
||||
channel = models.ForeignKey(
|
||||
"Channel", on_delete=models.CASCADE, related_name="recordings"
|
||||
)
|
||||
start_time = models.DateTimeField()
|
||||
end_time = models.DateTimeField()
|
||||
task_id = models.CharField(max_length=255, null=True, blank=True)
|
||||
custom_properties = models.TextField(null=True, blank=True)
|
||||
custom_properties = models.JSONField(default=dict, blank=True, null=True)
|
||||
|
||||
def __str__(self):
|
||||
return f"{self.channel.name} - {self.start_time} to {self.end_time}"
|
||||
|
||||
|
||||
class RecurringRecordingRule(models.Model):
|
||||
"""Rule describing a recurring manual DVR schedule."""
|
||||
|
||||
channel = models.ForeignKey(
|
||||
"Channel",
|
||||
on_delete=models.CASCADE,
|
||||
related_name="recurring_rules",
|
||||
)
|
||||
days_of_week = models.JSONField(default=list)
|
||||
start_time = models.TimeField()
|
||||
end_time = models.TimeField()
|
||||
enabled = models.BooleanField(default=True)
|
||||
name = models.CharField(max_length=255, blank=True)
|
||||
start_date = models.DateField(null=True, blank=True)
|
||||
end_date = models.DateField(null=True, blank=True)
|
||||
created_at = models.DateTimeField(auto_now_add=True)
|
||||
updated_at = models.DateTimeField(auto_now=True)
|
||||
|
||||
class Meta:
|
||||
ordering = ["channel", "start_time"]
|
||||
|
||||
def __str__(self):
|
||||
channel_name = getattr(self.channel, "name", str(self.channel_id))
|
||||
return f"Recurring rule for {channel_name}"
|
||||
|
||||
def cleaned_days(self):
|
||||
try:
|
||||
return sorted({int(d) for d in (self.days_of_week or []) if 0 <= int(d) <= 6})
|
||||
except Exception:
|
||||
return []
|
||||
|
|
|
|||
|
|
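A minimal standalone sketch of the stream-hash derivation shown above (mirroring Stream.generate_hash_key; in the app the keys list comes from CoreSettings.get_m3u_hash_key(), and the sample names, URL, and key values below are hypothetical):

import hashlib
import json

def generate_hash_key(name, url, tvg_id, keys, m3u_id=None, group=None):
    # Only the configured keys participate in the hash, so adding "m3u_id"
    # or "group" to the key list changes which streams count as duplicates.
    stream_parts = {"name": name, "url": url, "tvg_id": tvg_id,
                    "m3u_id": m3u_id, "group": group}
    hash_parts = {key: stream_parts[key] for key in keys if key in stream_parts}
    serialized = json.dumps(hash_parts, sort_keys=True)  # stable ordering
    return hashlib.sha256(serialized.encode()).hexdigest()

# Two streams with the same name/url but different M3U accounts hash
# differently once "m3u_id" is part of the configured key list.
print(generate_hash_key("News HD", "http://example.invalid/1.ts", "news.hd",
                        keys=["name", "url", "m3u_id"], m3u_id=1))
print(generate_hash_key("News HD", "http://example.invalid/1.ts", "news.hd",
                        keys=["name", "url", "m3u_id"], m3u_id=2))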
@@ -1,107 +1,234 @@
|
|||
import json
|
||||
from datetime import datetime
|
||||
|
||||
from rest_framework import serializers
|
||||
from .models import Stream, Channel, ChannelGroup, ChannelStream, ChannelGroupM3UAccount, Logo, ChannelProfile, ChannelProfileMembership, Recording
|
||||
from .models import (
|
||||
Stream,
|
||||
Channel,
|
||||
ChannelGroup,
|
||||
ChannelStream,
|
||||
ChannelGroupM3UAccount,
|
||||
Logo,
|
||||
ChannelProfile,
|
||||
ChannelProfileMembership,
|
||||
Recording,
|
||||
RecurringRecordingRule,
|
||||
)
|
||||
from apps.epg.serializers import EPGDataSerializer
|
||||
from core.models import StreamProfile
|
||||
from apps.epg.models import EPGData
|
||||
from django.urls import reverse
|
||||
from rest_framework import serializers
|
||||
from django.utils import timezone
|
||||
from core.utils import validate_flexible_url
|
||||
|
||||
|
||||
class LogoSerializer(serializers.ModelSerializer):
|
||||
cache_url = serializers.SerializerMethodField()
|
||||
channel_count = serializers.SerializerMethodField()
|
||||
is_used = serializers.SerializerMethodField()
|
||||
channel_names = serializers.SerializerMethodField()
|
||||
|
||||
class Meta:
|
||||
model = Logo
|
||||
fields = ['id', 'name', 'url', 'cache_url']
|
||||
fields = ["id", "name", "url", "cache_url", "channel_count", "is_used", "channel_names"]
|
||||
|
||||
def validate_url(self, value):
|
||||
"""Validate that the URL is unique for creation or update"""
|
||||
if self.instance and self.instance.url == value:
|
||||
return value
|
||||
|
||||
if Logo.objects.filter(url=value).exists():
|
||||
raise serializers.ValidationError("A logo with this URL already exists.")
|
||||
|
||||
return value
|
||||
|
||||
def create(self, validated_data):
|
||||
"""Handle logo creation with proper URL validation"""
|
||||
return Logo.objects.create(**validated_data)
|
||||
|
||||
def update(self, instance, validated_data):
|
||||
"""Handle logo updates"""
|
||||
for attr, value in validated_data.items():
|
||||
setattr(instance, attr, value)
|
||||
instance.save()
|
||||
return instance
|
||||
|
||||
def get_cache_url(self, obj):
|
||||
# return f"/api/channels/logos/{obj.id}/cache/"
|
||||
request = self.context.get('request')
|
||||
request = self.context.get("request")
|
||||
if request:
|
||||
return request.build_absolute_uri(reverse('api:channels:logo-cache', args=[obj.id]))
|
||||
return reverse('api:channels:logo-cache', args=[obj.id])
|
||||
return request.build_absolute_uri(
|
||||
reverse("api:channels:logo-cache", args=[obj.id])
|
||||
)
|
||||
return reverse("api:channels:logo-cache", args=[obj.id])
|
||||
|
||||
def get_channel_count(self, obj):
|
||||
"""Get the number of channels using this logo"""
|
||||
return obj.channels.count()
|
||||
|
||||
def get_is_used(self, obj):
|
||||
"""Check if this logo is used by any channels"""
|
||||
return obj.channels.exists()
|
||||
|
||||
def get_channel_names(self, obj):
|
||||
"""Get the names of channels using this logo (limited to first 5)"""
|
||||
names = []
|
||||
|
||||
# Get channel names
|
||||
channels = obj.channels.all()[:5]
|
||||
for channel in channels:
|
||||
names.append(f"Channel: {channel.name}")
|
||||
|
||||
# Calculate total count for "more" message
|
||||
total_count = self.get_channel_count(obj)
|
||||
if total_count > 5:
|
||||
names.append(f"...and {total_count - 5} more")
|
||||
|
||||
return names
|
||||
|
||||
|
||||
#
|
||||
# Stream
|
||||
#
|
||||
class StreamSerializer(serializers.ModelSerializer):
|
||||
url = serializers.CharField(
|
||||
required=False,
|
||||
allow_blank=True,
|
||||
allow_null=True,
|
||||
validators=[validate_flexible_url]
|
||||
)
|
||||
stream_profile_id = serializers.PrimaryKeyRelatedField(
|
||||
queryset=StreamProfile.objects.all(),
|
||||
source='stream_profile',
|
||||
source="stream_profile",
|
||||
allow_null=True,
|
||||
required=False
|
||||
required=False,
|
||||
)
|
||||
read_only_fields = ['is_custom', 'm3u_account', 'stream_hash']
|
||||
read_only_fields = ["is_custom", "m3u_account", "stream_hash"]
|
||||
|
||||
class Meta:
|
||||
model = Stream
|
||||
fields = [
|
||||
'id',
|
||||
'name',
|
||||
'url',
|
||||
'm3u_account', # Uncomment if using M3U fields
|
||||
'logo_url',
|
||||
'tvg_id',
|
||||
'local_file',
|
||||
'current_viewers',
|
||||
'updated_at',
|
||||
'stream_profile_id',
|
||||
'is_custom',
|
||||
'channel_group',
|
||||
'stream_hash',
|
||||
"id",
|
||||
"name",
|
||||
"url",
|
||||
"m3u_account", # Uncomment if using M3U fields
|
||||
"logo_url",
|
||||
"tvg_id",
|
||||
"local_file",
|
||||
"current_viewers",
|
||||
"updated_at",
|
||||
"last_seen",
|
||||
"is_stale",
|
||||
"stream_profile_id",
|
||||
"is_custom",
|
||||
"channel_group",
|
||||
"stream_hash",
|
||||
"stream_stats",
|
||||
"stream_stats_updated_at",
|
||||
]
|
||||
|
||||
def get_fields(self):
|
||||
fields = super().get_fields()
|
||||
|
||||
# Unable to edit specific properties if this stream was created from an M3U account
|
||||
if self.instance and getattr(self.instance, 'm3u_account', None) and not self.instance.is_custom:
|
||||
fields['id'].read_only = True
|
||||
fields['name'].read_only = True
|
||||
fields['url'].read_only = True
|
||||
fields['m3u_account'].read_only = True
|
||||
fields['tvg_id'].read_only = True
|
||||
fields['channel_group'].read_only = True
|
||||
|
||||
if (
|
||||
self.instance
|
||||
and getattr(self.instance, "m3u_account", None)
|
||||
and not self.instance.is_custom
|
||||
):
|
||||
fields["id"].read_only = True
|
||||
fields["name"].read_only = True
|
||||
fields["url"].read_only = True
|
||||
fields["m3u_account"].read_only = True
|
||||
fields["tvg_id"].read_only = True
|
||||
fields["channel_group"].read_only = True
|
||||
|
||||
return fields
|
||||
|
||||
|
||||
class ChannelGroupM3UAccountSerializer(serializers.ModelSerializer):
|
||||
m3u_accounts = serializers.IntegerField(source="m3u_accounts.id", read_only=True)
|
||||
enabled = serializers.BooleanField()
|
||||
auto_channel_sync = serializers.BooleanField(default=False)
|
||||
auto_sync_channel_start = serializers.FloatField(allow_null=True, required=False)
|
||||
custom_properties = serializers.JSONField(required=False)
|
||||
|
||||
class Meta:
|
||||
model = ChannelGroupM3UAccount
|
||||
fields = ["m3u_accounts", "channel_group", "enabled", "auto_channel_sync", "auto_sync_channel_start", "custom_properties", "is_stale", "last_seen"]
|
||||
|
||||
def to_representation(self, instance):
|
||||
data = super().to_representation(instance)
|
||||
|
||||
custom_props = instance.custom_properties or {}
|
||||
|
||||
return data
|
||||
|
||||
def to_internal_value(self, data):
|
||||
# Accept both dict and JSON string for custom_properties (for backward compatibility)
|
||||
val = data.get("custom_properties")
|
||||
if isinstance(val, str):
|
||||
try:
|
||||
data["custom_properties"] = json.loads(val)
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
return super().to_internal_value(data)
|
||||
|
||||
#
|
||||
# Channel Group
|
||||
#
|
||||
class ChannelGroupSerializer(serializers.ModelSerializer):
|
||||
channel_count = serializers.SerializerMethodField()
|
||||
m3u_account_count = serializers.SerializerMethodField()
|
||||
m3u_accounts = ChannelGroupM3UAccountSerializer(
|
||||
many=True,
|
||||
read_only=True
|
||||
)
|
||||
|
||||
class Meta:
|
||||
model = ChannelGroup
|
||||
fields = ['id', 'name']
|
||||
fields = ["id", "name", "channel_count", "m3u_account_count", "m3u_accounts"]
|
||||
|
||||
def get_channel_count(self, obj):
|
||||
"""Get count of channels in this group"""
|
||||
return obj.channels.count()
|
||||
|
||||
def get_m3u_account_count(self, obj):
|
||||
"""Get count of M3U accounts associated with this group"""
|
||||
return obj.m3u_accounts.count()
|
||||
|
||||
|
||||
class ChannelProfileSerializer(serializers.ModelSerializer):
|
||||
channels = serializers.SerializerMethodField()
|
||||
|
||||
class Meta:
|
||||
model = ChannelProfile
|
||||
fields = ['id', 'name', 'channels']
|
||||
fields = ["id", "name", "channels"]
|
||||
|
||||
def get_channels(self, obj):
|
||||
memberships = ChannelProfileMembership.objects.filter(channel_profile=obj, enabled=True)
|
||||
return [
|
||||
membership.channel.id
|
||||
for membership in memberships
|
||||
]
|
||||
memberships = ChannelProfileMembership.objects.filter(
|
||||
channel_profile=obj, enabled=True
|
||||
)
|
||||
return [membership.channel.id for membership in memberships]
|
||||
|
||||
|
||||
class ChannelProfileMembershipSerializer(serializers.ModelSerializer):
|
||||
class Meta:
|
||||
model = ChannelProfileMembership
|
||||
fields = ['channel', 'enabled']
|
||||
fields = ["channel", "enabled"]
|
||||
|
||||
|
||||
class ChanneProfilelMembershipUpdateSerializer(serializers.Serializer):
|
||||
channel_id = serializers.IntegerField() # Ensure channel_id is an integer
|
||||
enabled = serializers.BooleanField()
|
||||
|
||||
|
||||
class BulkChannelProfileMembershipSerializer(serializers.Serializer):
|
||||
channels = serializers.ListField(
|
||||
child=ChanneProfilelMembershipUpdateSerializer(), # Use the nested serializer
|
||||
allow_empty=False
|
||||
allow_empty=False,
|
||||
)
|
||||
|
||||
def validate_channels(self, value):
|
||||
|
|
@@ -109,16 +236,20 @@ class BulkChannelProfileMembershipSerializer(serializers.Serializer):
|
|||
raise serializers.ValidationError("At least one channel must be provided.")
|
||||
return value
|
||||
|
||||
|
||||
#
|
||||
# Channel
|
||||
#
|
||||
class ChannelSerializer(serializers.ModelSerializer):
|
||||
# Show nested group data, or ID
|
||||
channel_number = serializers.IntegerField(allow_null=True, required=False)
|
||||
# Ensure channel_number is explicitly typed as FloatField and properly validated
|
||||
channel_number = serializers.FloatField(
|
||||
allow_null=True,
|
||||
required=False,
|
||||
error_messages={"invalid": "Channel number must be a valid decimal number."},
|
||||
)
|
||||
channel_group_id = serializers.PrimaryKeyRelatedField(
|
||||
queryset=ChannelGroup.objects.all(),
|
||||
source="channel_group",
|
||||
required=False
|
||||
queryset=ChannelGroup.objects.all(), source="channel_group", required=False
|
||||
)
|
||||
epg_data_id = serializers.PrimaryKeyRelatedField(
|
||||
queryset=EPGData.objects.all(),
|
||||
|
|
@@ -129,64 +260,88 @@ class ChannelSerializer(serializers.ModelSerializer):
|
|||
|
||||
stream_profile_id = serializers.PrimaryKeyRelatedField(
|
||||
queryset=StreamProfile.objects.all(),
|
||||
source='stream_profile',
|
||||
allow_null=True,
|
||||
required=False
|
||||
)
|
||||
|
||||
streams = serializers.PrimaryKeyRelatedField(queryset=Stream.objects.all(), many=True, required=False)
|
||||
|
||||
logo_id = serializers.PrimaryKeyRelatedField(
|
||||
queryset=Logo.objects.all(),
|
||||
source='logo',
|
||||
source="stream_profile",
|
||||
allow_null=True,
|
||||
required=False,
|
||||
)
|
||||
|
||||
streams = serializers.PrimaryKeyRelatedField(
|
||||
queryset=Stream.objects.all(), many=True, required=False
|
||||
)
|
||||
|
||||
logo_id = serializers.PrimaryKeyRelatedField(
|
||||
queryset=Logo.objects.all(),
|
||||
source="logo",
|
||||
allow_null=True,
|
||||
required=False,
|
||||
)
|
||||
|
||||
auto_created_by_name = serializers.SerializerMethodField()
|
||||
|
||||
class Meta:
|
||||
model = Channel
|
||||
fields = [
|
||||
'id',
|
||||
'channel_number',
|
||||
'name',
|
||||
'channel_group_id',
|
||||
'tvg_id',
|
||||
'epg_data_id',
|
||||
'streams',
|
||||
'stream_profile_id',
|
||||
'uuid',
|
||||
'logo_id',
|
||||
"id",
|
||||
"channel_number",
|
||||
"name",
|
||||
"channel_group_id",
|
||||
"tvg_id",
|
||||
"tvc_guide_stationid",
|
||||
"epg_data_id",
|
||||
"streams",
|
||||
"stream_profile_id",
|
||||
"uuid",
|
||||
"logo_id",
|
||||
"user_level",
|
||||
"auto_created",
|
||||
"auto_created_by",
|
||||
"auto_created_by_name",
|
||||
]
|
||||
|
||||
def to_representation(self, instance):
|
||||
include_streams = self.context.get('include_streams', False)
|
||||
include_streams = self.context.get("include_streams", False)
|
||||
|
||||
if include_streams:
|
||||
self.fields['streams'] = serializers.SerializerMethodField()
|
||||
|
||||
return super().to_representation(instance)
|
||||
self.fields["streams"] = serializers.SerializerMethodField()
|
||||
return super().to_representation(instance)
|
||||
else:
|
||||
# Fix: For PATCH/PUT responses, ensure streams are ordered
|
||||
representation = super().to_representation(instance)
|
||||
if "streams" in representation:
|
||||
representation["streams"] = list(
|
||||
instance.streams.all()
|
||||
.order_by("channelstream__order")
|
||||
.values_list("id", flat=True)
|
||||
)
|
||||
return representation
|
||||
|
||||
def get_logo(self, obj):
|
||||
return LogoSerializer(obj.logo).data
|
||||
|
||||
def get_streams(self, obj):
|
||||
"""Retrieve ordered stream IDs for GET requests."""
|
||||
return StreamSerializer(obj.streams.all().order_by('channelstream__order'), many=True).data
|
||||
return StreamSerializer(
|
||||
obj.streams.all().order_by("channelstream__order"), many=True
|
||||
).data
|
||||
|
||||
def create(self, validated_data):
|
||||
streams = validated_data.pop('streams', [])
|
||||
channel_number = validated_data.pop('channel_number', Channel.get_next_available_channel_number())
|
||||
streams = validated_data.pop("streams", [])
|
||||
channel_number = validated_data.pop(
|
||||
"channel_number", Channel.get_next_available_channel_number()
|
||||
)
|
||||
validated_data["channel_number"] = channel_number
|
||||
channel = Channel.objects.create(**validated_data)
|
||||
|
||||
# Add streams in the specified order
|
||||
for index, stream in enumerate(streams):
|
||||
ChannelStream.objects.create(channel=channel, stream_id=stream.id, order=index)
|
||||
ChannelStream.objects.create(
|
||||
channel=channel, stream_id=stream.id, order=index
|
||||
)
|
||||
|
||||
return channel
|
||||
|
||||
def update(self, instance, validated_data):
|
||||
streams = validated_data.pop('streams', None)
|
||||
streams = validated_data.pop("streams", None)
|
||||
|
||||
# Update standard fields
|
||||
for attr, value in validated_data.items():
|
||||
|
|
@@ -197,8 +352,7 @@ class ChannelSerializer(serializers.ModelSerializer):
|
|||
if streams is not None:
|
||||
# Normalize stream IDs
|
||||
normalized_ids = [
|
||||
stream.id if hasattr(stream, "id") else stream
|
||||
for stream in streams
|
||||
stream.id if hasattr(stream, "id") else stream for stream in streams
|
||||
]
|
||||
print(normalized_ids)
|
||||
|
||||
|
|
@@ -225,40 +379,85 @@ class ChannelSerializer(serializers.ModelSerializer):
|
|||
cs.save(update_fields=["order"])
|
||||
else:
|
||||
ChannelStream.objects.create(
|
||||
channel=instance,
|
||||
stream_id=stream_id,
|
||||
order=order
|
||||
channel=instance, stream_id=stream_id, order=order
|
||||
)
|
||||
|
||||
return instance
|
||||
|
||||
def validate_channel_number(self, value):
|
||||
"""Ensure channel_number is properly processed as a float"""
|
||||
if value is None:
|
||||
return value
|
||||
|
||||
try:
|
||||
# Ensure it's processed as a float
|
||||
return float(value)
|
||||
except (ValueError, TypeError):
|
||||
raise serializers.ValidationError(
|
||||
"Channel number must be a valid decimal number."
|
||||
)
|
||||
|
||||
def validate_stream_profile(self, value):
|
||||
"""Handle special case where empty/0 values mean 'use default' (null)"""
|
||||
if value == '0' or value == 0 or value == '' or value is None:
|
||||
if value == "0" or value == 0 or value == "" or value is None:
|
||||
return None
|
||||
return value # PrimaryKeyRelatedField will handle the conversion to object
|
||||
|
||||
class ChannelGroupM3UAccountSerializer(serializers.ModelSerializer):
|
||||
enabled = serializers.BooleanField()
|
||||
|
||||
class Meta:
|
||||
model = ChannelGroupM3UAccount
|
||||
fields = ['id', 'channel_group', 'enabled']
|
||||
|
||||
# Optionally, if you only need the id of the ChannelGroup, you can customize it like this:
|
||||
# channel_group = serializers.PrimaryKeyRelatedField(queryset=ChannelGroup.objects.all())
|
||||
def get_auto_created_by_name(self, obj):
|
||||
"""Get the name of the M3U account that auto-created this channel."""
|
||||
if obj.auto_created_by:
|
||||
return obj.auto_created_by.name
|
||||
return None
|
||||
|
||||
|
||||
class RecordingSerializer(serializers.ModelSerializer):
|
||||
class Meta:
|
||||
model = Recording
|
||||
fields = '__all__'
|
||||
read_only_fields = ['task_id']
|
||||
fields = "__all__"
|
||||
read_only_fields = ["task_id"]
|
||||
|
||||
def validate(self, data):
|
||||
start_time = data.get('start_time')
|
||||
end_time = data.get('end_time')
|
||||
from core.models import CoreSettings
|
||||
start_time = data.get("start_time")
|
||||
end_time = data.get("end_time")
|
||||
|
||||
if start_time and timezone.is_naive(start_time):
|
||||
start_time = timezone.make_aware(start_time, timezone.get_current_timezone())
|
||||
data["start_time"] = start_time
|
||||
if end_time and timezone.is_naive(end_time):
|
||||
end_time = timezone.make_aware(end_time, timezone.get_current_timezone())
|
||||
data["end_time"] = end_time
|
||||
|
||||
# If this is an EPG-based recording (program provided), apply global pre/post offsets
|
||||
try:
|
||||
cp = data.get("custom_properties") or {}
|
||||
is_epg_based = isinstance(cp, dict) and isinstance(cp.get("program"), (dict,))
|
||||
except Exception:
|
||||
is_epg_based = False
|
||||
|
||||
if is_epg_based and start_time and end_time:
|
||||
try:
|
||||
pre_min = int(CoreSettings.get_dvr_pre_offset_minutes())
|
||||
except Exception:
|
||||
pre_min = 0
|
||||
try:
|
||||
post_min = int(CoreSettings.get_dvr_post_offset_minutes())
|
||||
except Exception:
|
||||
post_min = 0
|
||||
from datetime import timedelta
|
||||
try:
|
||||
if pre_min and pre_min > 0:
|
||||
start_time = start_time - timedelta(minutes=pre_min)
|
||||
except Exception:
|
||||
pass
|
||||
try:
|
||||
if post_min and post_min > 0:
|
||||
end_time = end_time + timedelta(minutes=post_min)
|
||||
except Exception:
|
||||
pass
|
||||
# write back adjusted times so scheduling uses them
|
||||
data["start_time"] = start_time
|
||||
data["end_time"] = end_time
|
||||
|
||||
now = timezone.now() # timezone-aware current time
|
||||
|
||||
|
|
@@ -267,8 +466,61 @@ class RecordingSerializer(serializers.ModelSerializer):
|
|||
|
||||
if start_time < now:
|
||||
# Optional: Adjust start_time if it's in the past but end_time is in the future
|
||||
data['start_time'] = now # or: timezone.now() + timedelta(seconds=1)
|
||||
if end_time <= data['start_time']:
|
||||
data["start_time"] = now # or: timezone.now() + timedelta(seconds=1)
|
||||
if end_time <= data["start_time"]:
|
||||
raise serializers.ValidationError("End time must be after start time.")
|
||||
|
||||
return data
|
||||
|
||||
|
||||
class RecurringRecordingRuleSerializer(serializers.ModelSerializer):
|
||||
class Meta:
|
||||
model = RecurringRecordingRule
|
||||
fields = "__all__"
|
||||
read_only_fields = ["created_at", "updated_at"]
|
||||
|
||||
def validate_days_of_week(self, value):
|
||||
if not value:
|
||||
raise serializers.ValidationError("Select at least one day of the week")
|
||||
cleaned = []
|
||||
for entry in value:
|
||||
try:
|
||||
iv = int(entry)
|
||||
except (TypeError, ValueError):
|
||||
raise serializers.ValidationError("Days of week must be integers 0-6")
|
||||
if iv < 0 or iv > 6:
|
||||
raise serializers.ValidationError("Days of week must be between 0 (Monday) and 6 (Sunday)")
|
||||
cleaned.append(iv)
|
||||
return sorted(set(cleaned))
|
||||
|
||||
def validate(self, attrs):
|
||||
start = attrs.get("start_time") or getattr(self.instance, "start_time", None)
|
||||
end = attrs.get("end_time") or getattr(self.instance, "end_time", None)
|
||||
start_date = attrs.get("start_date") if "start_date" in attrs else getattr(self.instance, "start_date", None)
|
||||
end_date = attrs.get("end_date") if "end_date" in attrs else getattr(self.instance, "end_date", None)
|
||||
if start_date is None:
|
||||
existing_start = getattr(self.instance, "start_date", None)
|
||||
if existing_start is None:
|
||||
raise serializers.ValidationError("Start date is required")
|
||||
if start_date and end_date and end_date < start_date:
|
||||
raise serializers.ValidationError("End date must be on or after start date")
|
||||
if end_date is None:
|
||||
existing_end = getattr(self.instance, "end_date", None)
|
||||
if existing_end is None:
|
||||
raise serializers.ValidationError("End date is required")
|
||||
if start and end and start_date and end_date:
|
||||
start_dt = datetime.combine(start_date, start)
|
||||
end_dt = datetime.combine(end_date, end)
|
||||
if end_dt <= start_dt:
|
||||
raise serializers.ValidationError("End datetime must be after start datetime")
|
||||
elif start and end and end == start:
|
||||
raise serializers.ValidationError("End time must be different from start time")
|
||||
# Normalize empty strings to None for dates
|
||||
if attrs.get("end_date") == "":
|
||||
attrs["end_date"] = None
|
||||
if attrs.get("start_date") == "":
|
||||
attrs["start_date"] = None
|
||||
return super().validate(attrs)
|
||||
|
||||
def create(self, validated_data):
|
||||
return super().create(validated_data)
|
||||
|
|
|
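For clarity, a minimal sketch of the DVR pre/post offset padding that RecordingSerializer.validate() now applies to EPG-based recordings; in the serializer the minute values come from CoreSettings.get_dvr_pre_offset_minutes() and CoreSettings.get_dvr_post_offset_minutes(), while the times and offsets below are hypothetical:

from datetime import datetime, timedelta

def apply_dvr_offsets(start_time, end_time, pre_min=0, post_min=0):
    # Pad the recording window: start earlier by pre_min, end later by post_min.
    if pre_min and pre_min > 0:
        start_time = start_time - timedelta(minutes=pre_min)
    if post_min and post_min > 0:
        end_time = end_time + timedelta(minutes=post_min)
    return start_time, end_time

start, end = apply_dvr_offsets(
    datetime(2024, 1, 1, 20, 0), datetime(2024, 1, 1, 21, 0), pre_min=2, post_min=5
)
print(start, end)  # 2024-01-01 19:58:00 2024-01-01 21:05:00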
|||
|
|
@@ -8,7 +8,7 @@ from .models import Channel, Stream, ChannelProfile, ChannelProfileMembership, R
|
|||
from apps.m3u.models import M3UAccount
|
||||
from apps.epg.tasks import parse_programs_for_tvg_id
|
||||
import logging, requests, time
|
||||
from .tasks import run_recording
|
||||
from .tasks import run_recording, prefetch_recording_artwork
|
||||
from django.utils.timezone import now, is_aware, make_aware
|
||||
from datetime import timedelta
|
||||
|
||||
|
|
@@ -45,6 +45,20 @@ def set_default_m3u_account(sender, instance, **kwargs):
|
|||
else:
|
||||
raise ValueError("No default M3UAccount found.")
|
||||
|
||||
@receiver(post_save, sender=Stream)
|
||||
def generate_custom_stream_hash(sender, instance, created, **kwargs):
|
||||
"""
|
||||
Generate a stable stream_hash for custom streams after creation.
|
||||
Uses the stream's ID to ensure the hash never changes even if name/url is edited.
|
||||
"""
|
||||
if instance.is_custom and not instance.stream_hash and created:
|
||||
import hashlib
|
||||
# Use stream ID for a stable, unique hash that never changes
|
||||
unique_string = f"custom_stream_{instance.id}"
|
||||
instance.stream_hash = hashlib.sha256(unique_string.encode()).hexdigest()
|
||||
# Use update to avoid triggering signals again
|
||||
Stream.objects.filter(id=instance.id).update(stream_hash=instance.stream_hash)
|
||||
|
||||
@receiver(post_save, sender=Channel)
|
||||
def refresh_epg_programs(sender, instance, created, **kwargs):
|
||||
"""
|
||||
|
|
@@ -62,15 +76,6 @@ def refresh_epg_programs(sender, instance, created, **kwargs):
|
|||
logger.info(f"New channel {instance.id} ({instance.name}) created with EPG data, refreshing program data")
|
||||
parse_programs_for_tvg_id.delay(instance.epg_data.id)
|
||||
|
||||
@receiver(post_save, sender=Channel)
|
||||
def add_new_channel_to_groups(sender, instance, created, **kwargs):
|
||||
if created:
|
||||
profiles = ChannelProfile.objects.all()
|
||||
ChannelProfileMembership.objects.bulk_create([
|
||||
ChannelProfileMembership(channel_profile=profile, channel=instance)
|
||||
for profile in profiles
|
||||
])
|
||||
|
||||
@receiver(post_save, sender=ChannelProfile)
|
||||
def create_profile_memberships(sender, instance, created, **kwargs):
|
||||
if created:
|
||||
|
|
@@ -82,8 +87,9 @@ def create_profile_memberships(sender, instance, created, **kwargs):
|
|||
|
||||
def schedule_recording_task(instance):
|
||||
eta = instance.start_time
|
||||
# Pass recording_id first so task can persist metadata to the correct row
|
||||
task = run_recording.apply_async(
|
||||
args=[instance.channel_id, str(instance.start_time), str(instance.end_time)],
|
||||
args=[instance.id, instance.channel_id, str(instance.start_time), str(instance.end_time)],
|
||||
eta=eta
|
||||
)
|
||||
return task.id
|
||||
|
|
@@ -132,6 +138,11 @@ def schedule_task_on_save(sender, instance, created, **kwargs):
|
|||
instance.save(update_fields=['task_id'])
|
||||
else:
|
||||
print("Start time is in the past. Not scheduling.")
|
||||
# Kick off poster/artwork prefetch to enrich Upcoming cards
|
||||
try:
|
||||
prefetch_recording_artwork.apply_async(args=[instance.id], countdown=1)
|
||||
except Exception as e:
|
||||
print("Error scheduling artwork prefetch:", e)
|
||||
except Exception as e:
|
||||
import traceback
|
||||
print("Error in post_save signal:", e)
|
||||
|
|
|
|||
File diff suppressed because it is too large
0	apps/channels/tests/__init__.py	Normal file
211	apps/channels/tests/test_channel_api.py	Normal file
|
|
@@ -0,0 +1,211 @@
|
|||
from django.test import TestCase
|
||||
from django.contrib.auth import get_user_model
|
||||
from rest_framework.test import APIClient
|
||||
from rest_framework import status
|
||||
|
||||
from apps.channels.models import Channel, ChannelGroup
|
||||
|
||||
User = get_user_model()
|
||||
|
||||
|
||||
class ChannelBulkEditAPITests(TestCase):
|
||||
def setUp(self):
|
||||
# Create a test admin user (user_level >= 10) and authenticate
|
||||
self.user = User.objects.create_user(username="testuser", password="testpass123")
|
||||
self.user.user_level = 10 # Set admin level
|
||||
self.user.save()
|
||||
self.client = APIClient()
|
||||
self.client.force_authenticate(user=self.user)
|
||||
self.bulk_edit_url = "/api/channels/channels/edit/bulk/"
|
||||
|
||||
# Create test channel group
|
||||
self.group1 = ChannelGroup.objects.create(name="Test Group 1")
|
||||
self.group2 = ChannelGroup.objects.create(name="Test Group 2")
|
||||
|
||||
# Create test channels
|
||||
self.channel1 = Channel.objects.create(
|
||||
channel_number=1.0,
|
||||
name="Channel 1",
|
||||
tvg_id="channel1",
|
||||
channel_group=self.group1
|
||||
)
|
||||
self.channel2 = Channel.objects.create(
|
||||
channel_number=2.0,
|
||||
name="Channel 2",
|
||||
tvg_id="channel2",
|
||||
channel_group=self.group1
|
||||
)
|
||||
self.channel3 = Channel.objects.create(
|
||||
channel_number=3.0,
|
||||
name="Channel 3",
|
||||
tvg_id="channel3"
|
||||
)
|
||||
|
||||
def test_bulk_edit_success(self):
|
||||
"""Test successful bulk update of multiple channels"""
|
||||
data = [
|
||||
{"id": self.channel1.id, "name": "Updated Channel 1"},
|
||||
{"id": self.channel2.id, "name": "Updated Channel 2", "channel_number": 22.0},
|
||||
]
|
||||
|
||||
response = self.client.patch(self.bulk_edit_url, data, format="json")
|
||||
|
||||
self.assertEqual(response.status_code, status.HTTP_200_OK)
|
||||
self.assertEqual(response.data["message"], "Successfully updated 2 channels")
|
||||
self.assertEqual(len(response.data["channels"]), 2)
|
||||
|
||||
# Verify database changes
|
||||
self.channel1.refresh_from_db()
|
||||
self.channel2.refresh_from_db()
|
||||
self.assertEqual(self.channel1.name, "Updated Channel 1")
|
||||
self.assertEqual(self.channel2.name, "Updated Channel 2")
|
||||
self.assertEqual(self.channel2.channel_number, 22.0)
|
||||
|
||||
def test_bulk_edit_with_empty_validated_data_first(self):
|
||||
"""
|
||||
Test the bug fix: when first channel has empty validated_data.
|
||||
This was causing: ValueError: Field names must be given to bulk_update()
|
||||
"""
|
||||
# Create a channel with data that will be "unchanged" (empty validated_data)
|
||||
# We'll send the same data it already has
|
||||
data = [
|
||||
# First channel: no actual changes (this would create empty validated_data)
|
||||
{"id": self.channel1.id},
|
||||
# Second channel: has changes
|
||||
{"id": self.channel2.id, "name": "Updated Channel 2"},
|
||||
]
|
||||
|
||||
response = self.client.patch(self.bulk_edit_url, data, format="json")
|
||||
|
||||
# Should not crash with ValueError
|
||||
self.assertEqual(response.status_code, status.HTTP_200_OK)
|
||||
self.assertEqual(response.data["message"], "Successfully updated 2 channels")
|
||||
|
||||
# Verify the channel with changes was updated
|
||||
self.channel2.refresh_from_db()
|
||||
self.assertEqual(self.channel2.name, "Updated Channel 2")
|
||||
|
||||
def test_bulk_edit_all_empty_updates(self):
|
||||
"""Test when all channels have empty updates (no actual changes)"""
|
||||
data = [
|
||||
{"id": self.channel1.id},
|
||||
{"id": self.channel2.id},
|
||||
]
|
||||
|
||||
response = self.client.patch(self.bulk_edit_url, data, format="json")
|
||||
|
||||
# Should succeed without calling bulk_update
|
||||
self.assertEqual(response.status_code, status.HTTP_200_OK)
|
||||
self.assertEqual(response.data["message"], "Successfully updated 2 channels")
|
||||
|
||||
def test_bulk_edit_mixed_fields(self):
|
||||
"""Test bulk update where different channels update different fields"""
|
||||
data = [
|
||||
{"id": self.channel1.id, "name": "New Name 1"},
|
||||
{"id": self.channel2.id, "channel_number": 99.0},
|
||||
{"id": self.channel3.id, "tvg_id": "new_tvg_id", "name": "New Name 3"},
|
||||
]
|
||||
|
||||
response = self.client.patch(self.bulk_edit_url, data, format="json")
|
||||
|
||||
self.assertEqual(response.status_code, status.HTTP_200_OK)
|
||||
self.assertEqual(response.data["message"], "Successfully updated 3 channels")
|
||||
|
||||
# Verify all updates
|
||||
self.channel1.refresh_from_db()
|
||||
self.channel2.refresh_from_db()
|
||||
self.channel3.refresh_from_db()
|
||||
|
||||
self.assertEqual(self.channel1.name, "New Name 1")
|
||||
self.assertEqual(self.channel2.channel_number, 99.0)
|
||||
self.assertEqual(self.channel3.tvg_id, "new_tvg_id")
|
||||
self.assertEqual(self.channel3.name, "New Name 3")
|
||||
|
||||
def test_bulk_edit_with_channel_group(self):
|
||||
"""Test bulk update with channel_group_id changes"""
|
||||
data = [
|
||||
{"id": self.channel1.id, "channel_group_id": self.group2.id},
|
||||
{"id": self.channel3.id, "channel_group_id": self.group1.id},
|
||||
]
|
||||
|
||||
response = self.client.patch(self.bulk_edit_url, data, format="json")
|
||||
|
||||
self.assertEqual(response.status_code, status.HTTP_200_OK)
|
||||
|
||||
# Verify group changes
|
||||
self.channel1.refresh_from_db()
|
||||
self.channel3.refresh_from_db()
|
||||
self.assertEqual(self.channel1.channel_group, self.group2)
|
||||
self.assertEqual(self.channel3.channel_group, self.group1)
|
||||
|
||||
def test_bulk_edit_nonexistent_channel(self):
|
||||
"""Test bulk update with a channel that doesn't exist"""
|
||||
nonexistent_id = 99999
|
||||
data = [
|
||||
{"id": nonexistent_id, "name": "Should Fail"},
|
||||
{"id": self.channel1.id, "name": "Should Still Update"},
|
||||
]
|
||||
|
||||
response = self.client.patch(self.bulk_edit_url, data, format="json")
|
||||
|
||||
# Should return 400 with errors
|
||||
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
|
||||
self.assertIn("errors", response.data)
|
||||
self.assertEqual(len(response.data["errors"]), 1)
|
||||
self.assertEqual(response.data["errors"][0]["channel_id"], nonexistent_id)
|
||||
self.assertEqual(response.data["errors"][0]["error"], "Channel not found")
|
||||
|
||||
# The valid channel should still be updated
|
||||
self.assertEqual(response.data["updated_count"], 1)
|
||||
|
||||
def test_bulk_edit_validation_error(self):
|
||||
"""Test bulk update with invalid data (validation error)"""
|
||||
data = [
|
||||
{"id": self.channel1.id, "channel_number": "invalid_number"},
|
||||
]
|
||||
|
||||
response = self.client.patch(self.bulk_edit_url, data, format="json")
|
||||
|
||||
# Should return 400 with validation errors
|
||||
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
|
||||
self.assertIn("errors", response.data)
|
||||
self.assertEqual(len(response.data["errors"]), 1)
|
||||
self.assertIn("channel_number", response.data["errors"][0]["errors"])
|
||||
|
||||
def test_bulk_edit_empty_channel_updates(self):
|
||||
"""Test bulk update with empty list"""
|
||||
data = []
|
||||
|
||||
response = self.client.patch(self.bulk_edit_url, data, format="json")
|
||||
|
||||
# Empty list is accepted and returns success with 0 updates
|
||||
self.assertEqual(response.status_code, status.HTTP_200_OK)
|
||||
self.assertEqual(response.data["message"], "Successfully updated 0 channels")
|
||||
|
||||
def test_bulk_edit_missing_channel_updates(self):
|
||||
"""Test bulk update without proper format (dict instead of list)"""
|
||||
data = {"channel_updates": {}}
|
||||
|
||||
response = self.client.patch(self.bulk_edit_url, data, format="json")
|
||||
|
||||
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
|
||||
self.assertEqual(response.data["error"], "Expected a list of channel updates")
|
||||
|
||||
def test_bulk_edit_preserves_other_fields(self):
|
||||
"""Test that bulk update only changes specified fields"""
|
||||
original_channel_number = self.channel1.channel_number
|
||||
original_tvg_id = self.channel1.tvg_id
|
||||
|
||||
data = [
|
||||
{"id": self.channel1.id, "name": "Only Name Changed"},
|
||||
]
|
||||
|
||||
response = self.client.patch(self.bulk_edit_url, data, format="json")
|
||||
|
||||
self.assertEqual(response.status_code, status.HTTP_200_OK)
|
||||
|
||||
# Verify only name changed, other fields preserved
|
||||
self.channel1.refresh_from_db()
|
||||
self.assertEqual(self.channel1.name, "Only Name Changed")
|
||||
self.assertEqual(self.channel1.channel_number, original_channel_number)
|
||||
self.assertEqual(self.channel1.tvg_id, original_tvg_id)
|
||||
40	apps/channels/tests/test_recurring_rules.py	Normal file
|
|
@@ -0,0 +1,40 @@
|
|||
from datetime import datetime, timedelta
|
||||
from django.test import TestCase
|
||||
from django.utils import timezone
|
||||
|
||||
from apps.channels.models import Channel, RecurringRecordingRule, Recording
|
||||
from apps.channels.tasks import sync_recurring_rule_impl, purge_recurring_rule_impl
|
||||
|
||||
|
||||
class RecurringRecordingRuleTasksTests(TestCase):
|
||||
def test_sync_recurring_rule_creates_and_purges_recordings(self):
|
||||
now = timezone.now()
|
||||
channel = Channel.objects.create(channel_number=1, name='Test Channel')
|
||||
|
||||
start_time = (now + timedelta(minutes=15)).time().replace(second=0, microsecond=0)
|
||||
end_time = (now + timedelta(minutes=75)).time().replace(second=0, microsecond=0)
|
||||
|
||||
rule = RecurringRecordingRule.objects.create(
|
||||
channel=channel,
|
||||
days_of_week=[now.weekday()],
|
||||
start_time=start_time,
|
||||
end_time=end_time,
|
||||
)
|
||||
|
||||
created = sync_recurring_rule_impl(rule.id, drop_existing=True, horizon_days=1)
|
||||
self.assertEqual(created, 1)
|
||||
|
||||
recording = Recording.objects.filter(custom_properties__rule__id=rule.id).first()
|
||||
self.assertIsNotNone(recording)
|
||||
self.assertEqual(recording.channel, channel)
|
||||
self.assertEqual(recording.custom_properties.get('rule', {}).get('id'), rule.id)
|
||||
|
||||
expected_start = timezone.make_aware(
|
||||
datetime.combine(recording.start_time.date(), start_time),
|
||||
timezone.get_current_timezone(),
|
||||
)
|
||||
self.assertLess(abs((recording.start_time - expected_start).total_seconds()), 60)
|
||||
|
||||
removed = purge_recurring_rule_impl(rule.id)
|
||||
self.assertEqual(removed, 1)
|
||||
self.assertFalse(Recording.objects.filter(custom_properties__rule__id=rule.id).exists())
|
||||
|
|
@@ -2,47 +2,66 @@ import logging, os
|
|||
from rest_framework import viewsets, status
|
||||
from rest_framework.response import Response
|
||||
from rest_framework.views import APIView
|
||||
from rest_framework.permissions import IsAuthenticated
|
||||
from rest_framework.decorators import action
|
||||
from drf_yasg.utils import swagger_auto_schema
|
||||
from drf_yasg import openapi
|
||||
from django.utils import timezone
|
||||
from datetime import timedelta
|
||||
from .models import EPGSource, ProgramData, EPGData # Added ProgramData
|
||||
from .serializers import ProgramDataSerializer, EPGSourceSerializer, EPGDataSerializer # Updated serializer
|
||||
from .serializers import (
|
||||
ProgramDataSerializer,
|
||||
EPGSourceSerializer,
|
||||
EPGDataSerializer,
|
||||
) # Updated serializer
|
||||
from .tasks import refresh_epg_data
|
||||
from apps.accounts.permissions import (
|
||||
Authenticated,
|
||||
permission_classes_by_action,
|
||||
permission_classes_by_method,
|
||||
)
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
# ─────────────────────────────
|
||||
# 1) EPG Source API (CRUD)
|
||||
# ─────────────────────────────
|
||||
class EPGSourceViewSet(viewsets.ModelViewSet):
|
||||
"""Handles CRUD operations for EPG sources"""
|
||||
"""
|
||||
API endpoint that allows EPG sources to be viewed or edited.
|
||||
"""
|
||||
|
||||
queryset = EPGSource.objects.all()
|
||||
serializer_class = EPGSourceSerializer
|
||||
permission_classes = [IsAuthenticated]
|
||||
|
||||
def get_permissions(self):
|
||||
try:
|
||||
return [perm() for perm in permission_classes_by_action[self.action]]
|
||||
except KeyError:
|
||||
return [Authenticated()]
|
||||
|
||||
def list(self, request, *args, **kwargs):
|
||||
logger.debug("Listing all EPG sources.")
|
||||
return super().list(request, *args, **kwargs)
|
||||
|
||||
@action(detail=False, methods=['post'])
|
||||
@action(detail=False, methods=["post"])
|
||||
def upload(self, request):
|
||||
if 'file' not in request.FILES:
|
||||
return Response({'error': 'No file uploaded'}, status=status.HTTP_400_BAD_REQUEST)
|
||||
if "file" not in request.FILES:
|
||||
return Response(
|
||||
{"error": "No file uploaded"}, status=status.HTTP_400_BAD_REQUEST
|
||||
)
|
||||
|
||||
file = request.FILES['file']
|
||||
file = request.FILES["file"]
|
||||
file_name = file.name
|
||||
file_path = os.path.join('/data/uploads/epgs', file_name)
|
||||
file_path = os.path.join("/data/uploads/epgs", file_name)
|
||||
|
||||
os.makedirs(os.path.dirname(file_path), exist_ok=True)
|
||||
with open(file_path, 'wb+') as destination:
|
||||
with open(file_path, "wb+") as destination:
|
||||
for chunk in file.chunks():
|
||||
destination.write(chunk)
|
||||
|
||||
new_obj_data = request.data.copy()
|
||||
new_obj_data['file_path'] = file_path
|
||||
new_obj_data["file_path"] = file_path
|
||||
|
||||
serializer = self.get_serializer(data=new_obj_data)
|
||||
serializer.is_valid(raise_exception=True)
|
||||
|
|
@ -50,72 +69,241 @@ class EPGSourceViewSet(viewsets.ModelViewSet):
|
|||
|
||||
return Response(serializer.data, status=status.HTTP_201_CREATED)
|
||||
|
||||
def partial_update(self, request, *args, **kwargs):
|
||||
"""Handle partial updates with special logic for is_active field"""
|
||||
instance = self.get_object()
|
||||
|
||||
# Check if we're toggling is_active
|
||||
if (
|
||||
"is_active" in request.data
|
||||
and instance.is_active != request.data["is_active"]
|
||||
):
|
||||
# Set appropriate status based on new is_active value
|
||||
if request.data["is_active"]:
|
||||
request.data["status"] = "idle"
|
||||
else:
|
||||
request.data["status"] = "disabled"
|
||||
|
||||
# Continue with regular partial update
|
||||
return super().partial_update(request, *args, **kwargs)
|
||||
|
||||
|
||||
# ─────────────────────────────
|
||||
# 2) Program API (CRUD)
|
||||
# ─────────────────────────────
|
||||
class ProgramViewSet(viewsets.ModelViewSet):
|
||||
"""Handles CRUD operations for EPG programs"""
|
||||
|
||||
queryset = ProgramData.objects.all()
|
||||
serializer_class = ProgramDataSerializer
|
||||
permission_classes = [IsAuthenticated]
|
||||
|
||||
def get_permissions(self):
|
||||
try:
|
||||
return [perm() for perm in permission_classes_by_action[self.action]]
|
||||
except KeyError:
|
||||
return [Authenticated()]
|
||||
|
||||
def list(self, request, *args, **kwargs):
|
||||
logger.debug("Listing all EPG programs.")
|
||||
return super().list(request, *args, **kwargs)
|
||||
|
||||
|
||||
# ─────────────────────────────
|
||||
# 3) EPG Grid View
|
||||
# ─────────────────────────────
|
||||
class EPGGridAPIView(APIView):
|
||||
"""Returns all programs airing in the next 24 hours including currently running ones and recent ones"""
|
||||
|
||||
def get_permissions(self):
|
||||
try:
|
||||
return [
|
||||
perm() for perm in permission_classes_by_method[self.request.method]
|
||||
]
|
||||
except KeyError:
|
||||
return [Authenticated()]
|
||||
|
||||
@swagger_auto_schema(
|
||||
operation_description="Retrieve programs from the previous hour, currently running and upcoming for the next 24 hours",
|
||||
responses={200: ProgramDataSerializer(many=True)}
|
||||
responses={200: ProgramDataSerializer(many=True)},
|
||||
)
|
||||
def get(self, request, format=None):
|
||||
# Use current time instead of midnight
|
||||
now = timezone.now()
|
||||
one_hour_ago = now - timedelta(hours=1)
|
||||
twenty_four_hours_later = now + timedelta(hours=24)
|
||||
logger.debug(f"EPGGridAPIView: Querying programs between {one_hour_ago} and {twenty_four_hours_later}.")
|
||||
logger.debug(
|
||||
f"EPGGridAPIView: Querying programs between {one_hour_ago} and {twenty_four_hours_later}."
|
||||
)
|
||||
|
||||
# Use select_related to prefetch EPGData and include programs from the last hour
|
||||
programs = ProgramData.objects.select_related('epg').filter(
|
||||
programs = ProgramData.objects.select_related("epg").filter(
|
||||
# Programs that end after one hour ago (includes recently ended programs)
|
||||
end_time__gt=one_hour_ago,
|
||||
# AND start before the end time window
|
||||
start_time__lt=twenty_four_hours_later
|
||||
start_time__lt=twenty_four_hours_later,
|
||||
)
|
||||
count = programs.count()
|
||||
logger.debug(f"EPGGridAPIView: Found {count} program(s), including recently ended, currently running, and upcoming shows.")
|
||||
logger.debug(
|
||||
f"EPGGridAPIView: Found {count} program(s), including recently ended, currently running, and upcoming shows."
|
||||
)
|
||||
|
||||
# Generate dummy programs for channels that have no EPG data
|
||||
# Generate dummy programs for channels that have no EPG data OR dummy EPG sources
|
||||
from apps.channels.models import Channel
|
||||
from apps.epg.models import EPGSource
|
||||
from django.db.models import Q
|
||||
|
||||
# Get channels with no EPG data
|
||||
# Get channels with no EPG data at all (standard dummy)
|
||||
channels_without_epg = Channel.objects.filter(Q(epg_data__isnull=True))
|
||||
channels_count = channels_without_epg.count()
|
||||
|
||||
# Log more detailed information about channels missing EPG data
|
||||
if channels_count > 0:
|
||||
# Get channels with custom dummy EPG sources (generate on-demand with patterns)
|
||||
channels_with_custom_dummy = Channel.objects.filter(
|
||||
epg_data__epg_source__source_type='dummy'
|
||||
).distinct()
|
||||
|
||||
# Log what we found
|
||||
without_count = channels_without_epg.count()
|
||||
custom_count = channels_with_custom_dummy.count()
|
||||
|
||||
if without_count > 0:
|
||||
channel_names = [f"{ch.name} (ID: {ch.id})" for ch in channels_without_epg]
|
||||
logger.warning(f"EPGGridAPIView: Missing EPG data for these channels: {', '.join(channel_names)}")
|
||||
logger.debug(
|
||||
f"EPGGridAPIView: Channels needing standard dummy EPG: {', '.join(channel_names)}"
|
||||
)
|
||||
|
||||
logger.debug(f"EPGGridAPIView: Found {channels_count} channels with no EPG data.")
|
||||
if custom_count > 0:
|
||||
channel_names = [f"{ch.name} (ID: {ch.id})" for ch in channels_with_custom_dummy]
|
||||
logger.debug(
|
||||
f"EPGGridAPIView: Channels needing custom dummy EPG: {', '.join(channel_names)}"
|
||||
)
|
||||
|
||||
logger.debug(
|
||||
f"EPGGridAPIView: Found {without_count} channels needing standard dummy, {custom_count} needing custom dummy EPG."
|
||||
)
|
||||
|
||||
# Serialize the regular programs
|
||||
serialized_programs = ProgramDataSerializer(programs, many=True).data
|
||||
|
||||
# Humorous program descriptions based on time of day - same as in output/views.py
|
||||
time_descriptions = {
|
||||
(0, 4): [
|
||||
"Late Night with {channel} - Where insomniacs unite!",
|
||||
"The 'Why Am I Still Awake?' Show on {channel}",
|
||||
"Counting Sheep - A {channel} production for the sleepless",
|
||||
],
|
||||
(4, 8): [
|
||||
"Dawn Patrol - Rise and shine with {channel}!",
|
||||
"Early Bird Special - Coffee not included",
|
||||
"Morning Zombies - Before coffee viewing on {channel}",
|
||||
],
|
||||
(8, 12): [
|
||||
"Mid-Morning Meetings - Pretend you're paying attention while watching {channel}",
|
||||
"The 'I Should Be Working' Hour on {channel}",
|
||||
"Productivity Killer - {channel}'s daytime programming",
|
||||
],
|
||||
(12, 16): [
|
||||
"Lunchtime Laziness with {channel}",
|
||||
"The Afternoon Slump - Brought to you by {channel}",
|
||||
"Post-Lunch Food Coma Theater on {channel}",
|
||||
],
|
||||
(16, 20): [
|
||||
"Rush Hour - {channel}'s alternative to traffic",
|
||||
"The 'What's For Dinner?' Debate on {channel}",
|
||||
"Evening Escapism - {channel}'s remedy for reality",
|
||||
],
|
||||
(20, 24): [
|
||||
"Prime Time Placeholder - {channel}'s finest not-programming",
|
||||
"The 'Netflix Was Too Complicated' Show on {channel}",
|
||||
"Family Argument Avoider - Courtesy of {channel}",
|
||||
],
|
||||
}
|
||||
|
||||
# Generate and append dummy programs
|
||||
dummy_programs = []
|
||||
for channel in channels_without_epg:
|
||||
# Use the channel UUID as tvg_id for dummy programs to match in the guide
|
||||
|
||||
# Import the function from output.views
|
||||
from apps.output.views import generate_dummy_programs as gen_dummy_progs
|
||||
|
||||
# Handle channels with CUSTOM dummy EPG sources (with patterns)
|
||||
for channel in channels_with_custom_dummy:
|
||||
# For dummy EPGs, ALWAYS use channel UUID to ensure unique programs per channel
|
||||
# This prevents multiple channels assigned to the same dummy EPG from showing identical data
|
||||
# Each channel gets its own unique program data even if they share the same EPG source
|
||||
dummy_tvg_id = str(channel.uuid)
|
||||
|
||||
try:
|
||||
# Create programs every 4 hours for the next 24 hours
|
||||
# Get the custom dummy EPG source
|
||||
epg_source = channel.epg_data.epg_source if channel.epg_data else None
|
||||
|
||||
logger.debug(f"Generating custom dummy programs for channel: {channel.name} (ID: {channel.id})")
|
||||
|
||||
# Determine which name to parse based on custom properties
|
||||
name_to_parse = channel.name
|
||||
if epg_source and epg_source.custom_properties:
|
||||
custom_props = epg_source.custom_properties
|
||||
name_source = custom_props.get('name_source')
|
||||
|
||||
if name_source == 'stream':
|
||||
# Get the stream index (1-based from user, convert to 0-based)
|
||||
stream_index = custom_props.get('stream_index', 1) - 1
|
||||
|
||||
# Get streams ordered by channelstream order
|
||||
channel_streams = channel.streams.all().order_by('channelstream__order')
|
||||
|
||||
if channel_streams.exists() and 0 <= stream_index < channel_streams.count():
|
||||
stream = list(channel_streams)[stream_index]
|
||||
name_to_parse = stream.name
|
||||
logger.debug(f"Using stream name for parsing: {name_to_parse} (stream index: {stream_index})")
|
||||
else:
|
||||
logger.warning(f"Stream index {stream_index} not found for channel {channel.name}, falling back to channel name")
|
||||
elif name_source == 'channel':
|
||||
logger.debug(f"Using channel name for parsing: {name_to_parse}")
|
||||
|
||||
# Generate programs using custom patterns from the dummy EPG source
|
||||
# Use the same tvg_id that will be set in the program data
|
||||
generated = gen_dummy_progs(
|
||||
channel_id=dummy_tvg_id,
|
||||
channel_name=name_to_parse,
|
||||
num_days=1,
|
||||
program_length_hours=4,
|
||||
epg_source=epg_source
|
||||
)
|
||||
|
||||
# Custom dummy should always return data (either from patterns or fallback)
|
||||
if generated:
|
||||
logger.debug(f"Generated {len(generated)} custom dummy programs for {channel.name}")
|
||||
# Convert generated programs to API format
|
||||
for program in generated:
|
||||
dummy_program = {
|
||||
"id": f"dummy-custom-{channel.id}-{program['start_time'].hour}",
|
||||
"epg": {"tvg_id": dummy_tvg_id, "name": channel.name},
|
||||
"start_time": program['start_time'].isoformat(),
|
||||
"end_time": program['end_time'].isoformat(),
|
||||
"title": program['title'],
|
||||
"description": program['description'],
|
||||
"tvg_id": dummy_tvg_id,
|
||||
"sub_title": None,
|
||||
"custom_properties": None,
|
||||
}
|
||||
dummy_programs.append(dummy_program)
|
||||
else:
|
||||
logger.warning(f"No programs generated for custom dummy EPG channel: {channel.name}")
|
||||
|
||||
except Exception as e:
|
||||
logger.error(
|
||||
f"Error creating custom dummy programs for channel {channel.name} (ID: {channel.id}): {str(e)}"
|
||||
)
|
||||
|
||||
# Handle channels with NO EPG data (standard dummy with humorous descriptions)
|
||||
for channel in channels_without_epg:
|
||||
# For channels with no EPG, use UUID to ensure uniqueness (matches frontend logic)
|
||||
# The frontend uses: tvgRecord?.tvg_id ?? channel.uuid
|
||||
# Since there's no EPG data, it will fall back to UUID
|
||||
dummy_tvg_id = str(channel.uuid)
|
||||
|
||||
try:
|
||||
logger.debug(f"Generating standard dummy programs for channel: {channel.name} (ID: {channel.id})")
|
||||
|
||||
# Create programs every 4 hours for the next 24 hours with humorous descriptions
|
||||
for hour_offset in range(0, 24, 4):
|
||||
# Use timedelta for time arithmetic instead of replace() to avoid hour overflow
|
||||
start_time = now + timedelta(hours=hour_offset)
|
||||
|
|
@ -123,31 +311,51 @@ class EPGGridAPIView(APIView):
|
|||
start_time = start_time.replace(minute=0, second=0, microsecond=0)
|
||||
end_time = start_time + timedelta(hours=4)
|
||||
|
||||
# Get the hour for selecting a description
|
||||
hour = start_time.hour
|
||||
day = 0 # Use 0 as we're only doing 1 day
|
||||
|
||||
# Find the appropriate time slot for description
|
||||
for time_range, descriptions in time_descriptions.items():
|
||||
start_range, end_range = time_range
|
||||
if start_range <= hour < end_range:
|
||||
# Pick a description using the sum of the hour and day as seed
|
||||
# This makes it somewhat random but consistent for the same timeslot
|
||||
description = descriptions[
|
||||
(hour + day) % len(descriptions)
|
||||
].format(channel=channel.name)
|
||||
break
|
||||
else:
|
||||
# Fallback description if somehow no range matches
|
||||
description = f"Placeholder program for {channel.name} - EPG data went on vacation"
|
||||
|
||||
# Create a dummy program in the same format as regular programs
|
||||
dummy_program = {
|
||||
'id': f"dummy-{channel.id}-{hour_offset}", # Create a unique ID
|
||||
'epg': {
|
||||
'tvg_id': dummy_tvg_id,
|
||||
'name': channel.name
|
||||
},
|
||||
'start_time': start_time.isoformat(),
|
||||
'end_time': end_time.isoformat(),
|
||||
'title': f"{channel.name}",
|
||||
'description': f"Placeholder program for {channel.name}",
|
||||
'tvg_id': dummy_tvg_id,
|
||||
'sub_title': None,
|
||||
'custom_properties': None
|
||||
"id": f"dummy-standard-{channel.id}-{hour_offset}",
|
||||
"epg": {"tvg_id": dummy_tvg_id, "name": channel.name},
|
||||
"start_time": start_time.isoformat(),
|
||||
"end_time": end_time.isoformat(),
|
||||
"title": f"{channel.name}",
|
||||
"description": description,
|
||||
"tvg_id": dummy_tvg_id,
|
||||
"sub_title": None,
|
||||
"custom_properties": None,
|
||||
}
|
||||
dummy_programs.append(dummy_program)
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Error creating dummy programs for channel {channel.name} (ID: {channel.id}): {str(e)}")
|
||||
logger.error(
|
||||
f"Error creating standard dummy programs for channel {channel.name} (ID: {channel.id}): {str(e)}"
|
||||
)
|
||||
|
||||
# Combine regular and dummy programs
|
||||
all_programs = list(serialized_programs) + dummy_programs
|
||||
logger.debug(f"EPGGridAPIView: Returning {len(all_programs)} total programs (including {len(dummy_programs)} dummy programs).")
|
||||
logger.debug(
|
||||
f"EPGGridAPIView: Returning {len(all_programs)} total programs (including {len(dummy_programs)} dummy programs)."
|
||||
)
|
||||
|
||||
return Response({"data": all_programs}, status=status.HTTP_200_OK)
|
||||
|
||||
return Response({'data': all_programs}, status=status.HTTP_200_OK)
|
||||
|
||||
# ─────────────────────────────
|
||||
# 4) EPG Import View
|
||||
|
|
@ -155,15 +363,41 @@ class EPGGridAPIView(APIView):
|
|||
class EPGImportAPIView(APIView):
|
||||
"""Triggers an EPG data refresh"""
|
||||
|
||||
def get_permissions(self):
|
||||
try:
|
||||
return [
|
||||
perm() for perm in permission_classes_by_method[self.request.method]
|
||||
]
|
||||
except KeyError:
|
||||
return [Authenticated()]
|
||||
|
||||
@swagger_auto_schema(
|
||||
operation_description="Triggers an EPG data import",
|
||||
responses={202: "EPG data import initiated"}
|
||||
responses={202: "EPG data import initiated"},
|
||||
)
|
||||
def post(self, request, format=None):
|
||||
logger.info("EPGImportAPIView: Received request to import EPG data.")
|
||||
refresh_epg_data.delay(request.data.get('id', None)) # Trigger Celery task
|
||||
epg_id = request.data.get("id", None)
|
||||
|
||||
# Check if this is a dummy EPG source
|
||||
try:
|
||||
from .models import EPGSource
|
||||
epg_source = EPGSource.objects.get(id=epg_id)
|
||||
if epg_source.source_type == 'dummy':
|
||||
logger.info(f"EPGImportAPIView: Skipping refresh for dummy EPG source {epg_id}")
|
||||
return Response(
|
||||
{"success": False, "message": "Dummy EPG sources do not require refreshing."},
|
||||
status=status.HTTP_400_BAD_REQUEST,
|
||||
)
|
||||
except EPGSource.DoesNotExist:
|
||||
pass # Let the task handle the missing source
|
||||
|
||||
refresh_epg_data.delay(epg_id) # Trigger Celery task
|
||||
logger.info("EPGImportAPIView: Task dispatched to refresh EPG data.")
|
||||
return Response({'success': True, 'message': 'EPG data import initiated.'}, status=status.HTTP_202_ACCEPTED)
|
||||
return Response(
|
||||
{"success": True, "message": "EPG data import initiated."},
|
||||
status=status.HTTP_202_ACCEPTED,
|
||||
)
|
||||
|
||||
|
||||
# ─────────────────────────────
|
||||
|
|
@ -173,6 +407,13 @@ class EPGDataViewSet(viewsets.ReadOnlyModelViewSet):
|
|||
"""
|
||||
API endpoint that allows EPGData objects to be viewed.
|
||||
"""
|
||||
|
||||
queryset = EPGData.objects.all()
|
||||
serializer_class = EPGDataSerializer
|
||||
permission_classes = [IsAuthenticated]
|
||||
|
||||
def get_permissions(self):
|
||||
try:
|
||||
return [perm() for perm in permission_classes_by_action[self.action]]
|
||||
except KeyError:
|
||||
return [Authenticated()]
|
||||
|
||||
|
|
|
|||
|
|
@ -0,0 +1,23 @@
# Generated by Django

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('epg', '0006_epgsource_refresh_interval_epgsource_refresh_task'),
    ]

    operations = [
        migrations.AddField(
            model_name='epgsource',
            name='status',
            field=models.CharField(choices=[('idle', 'Idle'), ('fetching', 'Fetching'), ('parsing', 'Parsing'), ('error', 'Error'), ('success', 'Success')], default='idle', max_length=20),
        ),
        migrations.AddField(
            model_name='epgsource',
            name='last_error',
            field=models.TextField(blank=True, null=True),
        ),
    ]
||||
14
apps/epg/migrations/0010_merge_20250503_2147.py
Normal file
@ -0,0 +1,14 @@
# Generated by Django 5.1.6 on 2025-05-03 21:47

from django.db import migrations


class Migration(migrations.Migration):

    dependencies = [
        ('epg', '0007_epgsource_status_epgsource_last_error'),
        ('epg', '0009_alter_epgsource_created_at_and_more'),
    ]

    operations = [
    ]
||||
42
apps/epg/migrations/0011_update_epgsource_fields.py
Normal file
@ -0,0 +1,42 @@
# Generated by Django 5.1.6 on 2025-05-04 21:43

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('epg', '0010_merge_20250503_2147'),
    ]

    operations = [
        # Change updated_at field
        migrations.AlterField(
            model_name='epgsource',
            name='updated_at',
            field=models.DateTimeField(blank=True, help_text='Time when this source was last successfully refreshed', null=True),
        ),

        # Add new last_message field
        migrations.AddField(
            model_name='epgsource',
            name='last_message',
            field=models.TextField(blank=True, help_text='Last status message, including success results or error information', null=True),
        ),

        # Copy data from last_error to last_message
        migrations.RunPython(
            code=lambda apps, schema_editor: apps.get_model('epg', 'EPGSource').objects.all().update(
                last_message=models.F('last_error')
            ),
            reverse_code=lambda apps, schema_editor: apps.get_model('epg', 'EPGSource').objects.all().update(
                last_error=models.F('last_message')
            ),
        ),

        # Remove the old field
        migrations.RemoveField(
            model_name='epgsource',
            name='last_error',
        ),
    ]
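The data-copy step in the migration above uses lambdas; the same pattern written with named functions (shown only to make the forwards/backwards pair explicit, still going through the historical model via apps.get_model) would be:

from django.db import migrations, models


def copy_last_error_to_last_message(apps, schema_editor):
    # Always operate on the historical model inside a data migration
    EPGSource = apps.get_model('epg', 'EPGSource')
    EPGSource.objects.all().update(last_message=models.F('last_error'))


def copy_last_message_to_last_error(apps, schema_editor):
    EPGSource = apps.get_model('epg', 'EPGSource')
    EPGSource.objects.all().update(last_error=models.F('last_message'))


# Equivalent operation: migrations.RunPython(copy_last_error_to_last_message, copy_last_message_to_last_error)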
||||
18
apps/epg/migrations/0012_alter_epgsource_status.py
Normal file
@ -0,0 +1,18 @@
# Generated by Django 5.1.6 on 2025-05-15 01:05

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('epg', '0011_update_epgsource_fields'),
    ]

    operations = [
        migrations.AlterField(
            model_name='epgsource',
            name='status',
            field=models.CharField(choices=[('idle', 'Idle'), ('fetching', 'Fetching'), ('parsing', 'Parsing'), ('error', 'Error'), ('success', 'Success'), ('disabled', 'Disabled')], default='idle', max_length=20),
        ),
    ]
||||
18
apps/epg/migrations/0013_alter_epgsource_refresh_interval.py
Normal file
@ -0,0 +1,18 @@
# Generated by Django 5.1.6 on 2025-05-21 19:58

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('epg', '0012_alter_epgsource_status'),
    ]

    operations = [
        migrations.AlterField(
            model_name='epgsource',
            name='refresh_interval',
            field=models.IntegerField(default=0),
        ),
    ]
||||
18
apps/epg/migrations/0014_epgsource_extracted_file_path.py
Normal file
@ -0,0 +1,18 @@
# Generated by Django 5.1.6 on 2025-05-26 15:48

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('epg', '0013_alter_epgsource_refresh_interval'),
    ]

    operations = [
        migrations.AddField(
            model_name='epgsource',
            name='extracted_file_path',
            field=models.CharField(blank=True, help_text='Path to extracted XML file after decompression', max_length=1024, null=True),
        ),
    ]
||||
|
|
@ -0,0 +1,18 @@
# Generated by Django 5.2.4 on 2025-09-02 14:30

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('epg', '0014_epgsource_extracted_file_path'),
    ]

    operations = [
        migrations.AlterField(
            model_name='programdata',
            name='custom_properties',
            field=models.JSONField(blank=True, default=dict, null=True),
        ),
    ]
||||
18
apps/epg/migrations/0016_epgdata_icon_url.py
Normal file
@ -0,0 +1,18 @@
# Generated by Django 5.2.4 on 2025-09-16 22:01

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('epg', '0015_alter_programdata_custom_properties'),
    ]

    operations = [
        migrations.AddField(
            model_name='epgdata',
            name='icon_url',
            field=models.URLField(blank=True, max_length=500, null=True),
        ),
    ]
||||
18
apps/epg/migrations/0017_alter_epgsource_url.py
Normal file
@ -0,0 +1,18 @@
# Generated by Django 5.2.4 on 2025-09-24 21:07

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('epg', '0016_epgdata_icon_url'),
    ]

    operations = [
        migrations.AlterField(
            model_name='epgsource',
            name='url',
            field=models.URLField(blank=True, max_length=1000, null=True),
        ),
    ]
||||
|
|
@ -0,0 +1,23 @@
# Generated by Django 5.2.4 on 2025-10-17 17:02

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('epg', '0017_alter_epgsource_url'),
    ]

    operations = [
        migrations.AddField(
            model_name='epgsource',
            name='custom_properties',
            field=models.JSONField(blank=True, default=dict, help_text='Custom properties for dummy EPG configuration (regex patterns, timezone, duration, etc.)', null=True),
        ),
        migrations.AlterField(
            model_name='epgsource',
            name='source_type',
            field=models.CharField(choices=[('xmltv', 'XMLTV URL'), ('schedules_direct', 'Schedules Direct API'), ('dummy', 'Custom Dummy EPG')], max_length=20),
        ),
    ]
||||
18
apps/epg/migrations/0019_alter_programdata_sub_title.py
Normal file
@ -0,0 +1,18 @@
# Generated by Django 5.2.4 on 2025-10-22 21:59

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('epg', '0018_epgsource_custom_properties_and_more'),
    ]

    operations = [
        migrations.AlterField(
            model_name='programdata',
            name='sub_title',
            field=models.TextField(blank=True, null=True),
        ),
    ]
||||
|
|
@ -0,0 +1,119 @@
|
|||
# Generated migration to replace {time} placeholders with {starttime}
|
||||
|
||||
import re
|
||||
from django.db import migrations
|
||||
|
||||
|
||||
def migrate_time_placeholders(apps, schema_editor):
|
||||
"""
|
||||
Replace {time} with {starttime} and {time24} with {starttime24}
|
||||
in all dummy EPG source custom_properties templates.
|
||||
"""
|
||||
EPGSource = apps.get_model('epg', 'EPGSource')
|
||||
|
||||
# Fields that contain templates with placeholders
|
||||
template_fields = [
|
||||
'title_template',
|
||||
'description_template',
|
||||
'upcoming_title_template',
|
||||
'upcoming_description_template',
|
||||
'ended_title_template',
|
||||
'ended_description_template',
|
||||
'channel_logo_url',
|
||||
'program_poster_url',
|
||||
]
|
||||
|
||||
# Get all dummy EPG sources
|
||||
dummy_sources = EPGSource.objects.filter(source_type='dummy')
|
||||
|
||||
updated_count = 0
|
||||
for source in dummy_sources:
|
||||
if not source.custom_properties:
|
||||
continue
|
||||
|
||||
modified = False
|
||||
custom_props = source.custom_properties.copy()
|
||||
|
||||
for field in template_fields:
|
||||
if field in custom_props and custom_props[field]:
|
||||
original_value = custom_props[field]
|
||||
|
||||
# Replace {time24} first (before {time}) to avoid double replacement
|
||||
# e.g., {time24} shouldn't become {starttime24} via {time} -> {starttime}
|
||||
new_value = original_value
|
||||
new_value = re.sub(r'\{time24\}', '{starttime24}', new_value)
|
||||
new_value = re.sub(r'\{time\}', '{starttime}', new_value)
|
||||
|
||||
if new_value != original_value:
|
||||
custom_props[field] = new_value
|
||||
modified = True
|
||||
|
||||
if modified:
|
||||
source.custom_properties = custom_props
|
||||
source.save(update_fields=['custom_properties'])
|
||||
updated_count += 1
|
||||
|
||||
if updated_count > 0:
|
||||
print(f"Migration complete: Updated {updated_count} dummy EPG source(s) with new placeholder names.")
|
||||
else:
|
||||
print("No dummy EPG sources needed placeholder updates.")
|
||||
|
||||
|
||||
def reverse_migration(apps, schema_editor):
|
||||
"""
|
||||
Reverse the migration by replacing {starttime} back to {time}.
|
||||
"""
|
||||
EPGSource = apps.get_model('epg', 'EPGSource')
|
||||
|
||||
template_fields = [
|
||||
'title_template',
|
||||
'description_template',
|
||||
'upcoming_title_template',
|
||||
'upcoming_description_template',
|
||||
'ended_title_template',
|
||||
'ended_description_template',
|
||||
'channel_logo_url',
|
||||
'program_poster_url',
|
||||
]
|
||||
|
||||
dummy_sources = EPGSource.objects.filter(source_type='dummy')
|
||||
|
||||
updated_count = 0
|
||||
for source in dummy_sources:
|
||||
if not source.custom_properties:
|
||||
continue
|
||||
|
||||
modified = False
|
||||
custom_props = source.custom_properties.copy()
|
||||
|
||||
for field in template_fields:
|
||||
if field in custom_props and custom_props[field]:
|
||||
original_value = custom_props[field]
|
||||
|
||||
# Reverse the replacements
|
||||
new_value = original_value
|
||||
new_value = re.sub(r'\{starttime24\}', '{time24}', new_value)
|
||||
new_value = re.sub(r'\{starttime\}', '{time}', new_value)
|
||||
|
||||
if new_value != original_value:
|
||||
custom_props[field] = new_value
|
||||
modified = True
|
||||
|
||||
if modified:
|
||||
source.custom_properties = custom_props
|
||||
source.save(update_fields=['custom_properties'])
|
||||
updated_count += 1
|
||||
|
||||
if updated_count > 0:
|
||||
print(f"Reverse migration complete: Reverted {updated_count} dummy EPG source(s) to old placeholder names.")
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('epg', '0019_alter_programdata_sub_title'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.RunPython(migrate_time_placeholders, reverse_migration),
|
||||
]
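Applied to a made-up template value, the two substitutions above rename both placeholders independently because each pattern is anchored on its closing brace:

import re

sample = "Up next on {channel} at {time} ({time24})"
sample = re.sub(r'\{time24\}', '{starttime24}', sample)
sample = re.sub(r'\{time\}', '{starttime}', sample)
print(sample)  # Up next on {channel} at {starttime} ({starttime24})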
|
||||
18
apps/epg/migrations/0021_epgsource_priority.py
Normal file
@ -0,0 +1,18 @@
# Generated by Django 5.2.4 on 2025-12-05 15:24

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('epg', '0020_migrate_time_to_starttime_placeholders'),
    ]

    operations = [
        migrations.AddField(
            model_name='epgsource',
            name='priority',
            field=models.PositiveIntegerField(default=0, help_text='Priority for EPG matching (higher numbers = higher priority). Used when multiple EPG sources have matching entries for a channel.'),
        ),
    ]
||||
|
|
@ -8,32 +8,110 @@ class EPGSource(models.Model):
|
|||
SOURCE_TYPE_CHOICES = [
|
||||
('xmltv', 'XMLTV URL'),
|
||||
('schedules_direct', 'Schedules Direct API'),
|
||||
('dummy', 'Custom Dummy EPG'),
|
||||
]
|
||||
|
||||
STATUS_IDLE = 'idle'
|
||||
STATUS_FETCHING = 'fetching'
|
||||
STATUS_PARSING = 'parsing'
|
||||
STATUS_ERROR = 'error'
|
||||
STATUS_SUCCESS = 'success'
|
||||
STATUS_DISABLED = 'disabled'
|
||||
|
||||
STATUS_CHOICES = [
|
||||
(STATUS_IDLE, 'Idle'),
|
||||
(STATUS_FETCHING, 'Fetching'),
|
||||
(STATUS_PARSING, 'Parsing'),
|
||||
(STATUS_ERROR, 'Error'),
|
||||
(STATUS_SUCCESS, 'Success'),
|
||||
(STATUS_DISABLED, 'Disabled'),
|
||||
]
|
||||
|
||||
name = models.CharField(max_length=255, unique=True)
|
||||
source_type = models.CharField(max_length=20, choices=SOURCE_TYPE_CHOICES)
|
||||
url = models.URLField(blank=True, null=True) # For XMLTV
|
||||
url = models.URLField(max_length=1000, blank=True, null=True) # For XMLTV
|
||||
api_key = models.CharField(max_length=255, blank=True, null=True) # For Schedules Direct
|
||||
is_active = models.BooleanField(default=True)
|
||||
file_path = models.CharField(max_length=1024, blank=True, null=True)
|
||||
refresh_interval = models.IntegerField(default=24)
|
||||
extracted_file_path = models.CharField(max_length=1024, blank=True, null=True,
|
||||
help_text="Path to extracted XML file after decompression")
|
||||
refresh_interval = models.IntegerField(default=0)
|
||||
refresh_task = models.ForeignKey(
|
||||
PeriodicTask, on_delete=models.SET_NULL, null=True, blank=True
|
||||
)
|
||||
custom_properties = models.JSONField(
|
||||
default=dict,
|
||||
blank=True,
|
||||
null=True,
|
||||
help_text="Custom properties for dummy EPG configuration (regex patterns, timezone, duration, etc.)"
|
||||
)
|
||||
priority = models.PositiveIntegerField(
|
||||
default=0,
|
||||
help_text="Priority for EPG matching (higher numbers = higher priority). Used when multiple EPG sources have matching entries for a channel."
|
||||
)
|
||||
status = models.CharField(
|
||||
max_length=20,
|
||||
choices=STATUS_CHOICES,
|
||||
default=STATUS_IDLE
|
||||
)
|
||||
last_message = models.TextField(
|
||||
null=True,
|
||||
blank=True,
|
||||
help_text="Last status message, including success results or error information"
|
||||
)
|
||||
created_at = models.DateTimeField(
|
||||
auto_now_add=True,
|
||||
help_text="Time when this source was created"
|
||||
)
|
||||
updated_at = models.DateTimeField(
|
||||
auto_now=True,
|
||||
help_text="Time when this source was last updated"
|
||||
null=True, blank=True,
|
||||
help_text="Time when this source was last successfully refreshed"
|
||||
)
|
||||
|
||||
def __str__(self):
|
||||
return self.name
|
||||
|
||||
def get_cache_file(self):
|
||||
# Decide on file extension
|
||||
file_ext = ".gz" if self.url.lower().endswith('.gz') else ".xml"
|
||||
import mimetypes
|
||||
|
||||
# Use a temporary extension for initial download
|
||||
# The actual extension will be determined after content inspection
|
||||
file_ext = ".tmp"
|
||||
|
||||
# If file_path is already set and contains an extension, use that
|
||||
# This handles cases where we've already detected the proper type
|
||||
if self.file_path and os.path.exists(self.file_path):
|
||||
_, existing_ext = os.path.splitext(self.file_path)
|
||||
if existing_ext:
|
||||
file_ext = existing_ext
|
||||
else:
|
||||
# Try to detect the MIME type and map to extension
|
||||
mime_type, _ = mimetypes.guess_type(self.file_path)
|
||||
if mime_type:
|
||||
if mime_type == 'application/gzip' or mime_type == 'application/x-gzip':
|
||||
file_ext = '.gz'
|
||||
elif mime_type == 'application/zip':
|
||||
file_ext = '.zip'
|
||||
elif mime_type == 'application/xml' or mime_type == 'text/xml':
|
||||
file_ext = '.xml'
|
||||
# For files without mime type detection, try peeking at content
|
||||
else:
|
||||
try:
|
||||
with open(self.file_path, 'rb') as f:
|
||||
header = f.read(4)
|
||||
# Check for gzip magic number (1f 8b)
|
||||
if header[:2] == b'\x1f\x8b':
|
||||
file_ext = '.gz'
|
||||
# Check for zip magic number (PK..)
|
||||
elif header[:2] == b'PK':
|
||||
file_ext = '.zip'
|
||||
# Check for XML
|
||||
elif header[:5] == b'<?xml' or header[:5] == b'<tv>':
|
||||
file_ext = '.xml'
|
||||
except Exception as e:
|
||||
# If we can't read the file, just keep the default extension
|
||||
pass
|
||||
|
||||
filename = f"{self.id}{file_ext}"
|
||||
|
||||
# Build full path in MEDIA_ROOT/cached_epg
|
||||
|
|
@ -46,11 +124,21 @@ class EPGSource(models.Model):
|
|||
|
||||
return cache
|
||||
|
||||
def save(self, *args, **kwargs):
|
||||
# Prevent auto_now behavior by handling updated_at manually
|
||||
if 'update_fields' in kwargs and 'updated_at' not in kwargs['update_fields']:
|
||||
# Don't modify updated_at for regular updates
|
||||
kwargs.setdefault('update_fields', [])
|
||||
if 'updated_at' in kwargs['update_fields']:
|
||||
kwargs['update_fields'].remove('updated_at')
|
||||
super().save(*args, **kwargs)
|
||||
|
||||
class EPGData(models.Model):
|
||||
# Removed the Channel foreign key. We now just store the original tvg_id
|
||||
# and a name (which might simply be the tvg_id if no real channel exists).
|
||||
tvg_id = models.CharField(max_length=255, null=True, blank=True, db_index=True)
|
||||
name = models.CharField(max_length=255)
|
||||
icon_url = models.URLField(max_length=500, null=True, blank=True)
|
||||
epg_source = models.ForeignKey(
|
||||
EPGSource,
|
||||
on_delete=models.CASCADE,
|
||||
|
|
@ -71,10 +159,10 @@ class ProgramData(models.Model):
|
|||
start_time = models.DateTimeField()
|
||||
end_time = models.DateTimeField()
|
||||
title = models.CharField(max_length=255)
|
||||
sub_title = models.CharField(max_length=255, blank=True, null=True)
|
||||
sub_title = models.TextField(blank=True, null=True)
|
||||
description = models.TextField(blank=True, null=True)
|
||||
tvg_id = models.CharField(max_length=255, null=True, blank=True)
|
||||
custom_properties = models.TextField(null=True, blank=True)
|
||||
custom_properties = models.JSONField(default=dict, blank=True, null=True)
|
||||
|
||||
def __str__(self):
|
||||
return f"{self.title} ({self.start_time} - {self.end_time})"
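The extension sniffing in get_cache_file above keys off well-known magic bytes (gzip 1f 8b, zip 'PK', an XML prologue). A self-contained sketch of that check follows; note that the model code reads only four bytes, so its b'<?xml' comparison there can never be true, which is why this sketch reads five:

def guess_epg_extension(path):
    # Best-effort extension detection for a downloaded EPG file
    try:
        with open(path, 'rb') as f:
            header = f.read(5)
    except OSError:
        return '.tmp'  # unreadable, keep the temporary extension
    if header[:2] == b'\x1f\x8b':
        return '.gz'
    if header[:2] == b'PK':
        return '.zip'
    if header.startswith(b'<?xml') or header.startswith(b'<tv'):
        return '.xml'
    return '.tmp'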
|
||||
|
|
|
|||
|
|
@ -1,17 +1,41 @@
|
|||
from core.utils import validate_flexible_url
|
||||
from rest_framework import serializers
|
||||
from .models import EPGSource, EPGData, ProgramData
|
||||
from apps.channels.models import Channel
|
||||
|
||||
class EPGSourceSerializer(serializers.ModelSerializer):
|
||||
epg_data_ids = serializers.SerializerMethodField()
|
||||
epg_data_count = serializers.SerializerMethodField()
|
||||
read_only_fields = ['created_at', 'updated_at']
|
||||
url = serializers.CharField(
|
||||
required=False,
|
||||
allow_blank=True,
|
||||
allow_null=True,
|
||||
validators=[validate_flexible_url]
|
||||
)
|
||||
|
||||
class Meta:
|
||||
model = EPGSource
|
||||
fields = ['id', 'name', 'source_type', 'url', 'api_key', 'is_active', 'epg_data_ids', 'refresh_interval', 'created_at', 'updated_at']
|
||||
fields = [
|
||||
'id',
|
||||
'name',
|
||||
'source_type',
|
||||
'url',
|
||||
'api_key',
|
||||
'is_active',
|
||||
'file_path',
|
||||
'refresh_interval',
|
||||
'priority',
|
||||
'status',
|
||||
'last_message',
|
||||
'created_at',
|
||||
'updated_at',
|
||||
'custom_properties',
|
||||
'epg_data_count'
|
||||
]
|
||||
|
||||
def get_epg_data_ids(self, obj):
|
||||
return list(obj.epgs.values_list('id', flat=True))
|
||||
def get_epg_data_count(self, obj):
|
||||
"""Return the count of EPG data entries instead of all IDs to prevent large payloads"""
|
||||
return obj.epgs.count()
|
||||
|
||||
class ProgramDataSerializer(serializers.ModelSerializer):
|
||||
class Meta:
|
||||
|
|
@ -31,5 +55,6 @@ class EPGDataSerializer(serializers.ModelSerializer):
|
|||
'id',
|
||||
'tvg_id',
|
||||
'name',
|
||||
'icon_url',
|
||||
'epg_source',
|
||||
]
|
||||
|
|
|
|||
|
|
@ -1,21 +1,88 @@
|
|||
from django.db.models.signals import post_save, post_delete
|
||||
from django.db.models.signals import post_save, post_delete, pre_save
|
||||
from django.dispatch import receiver
|
||||
from .models import EPGSource
|
||||
from .tasks import refresh_epg_data
|
||||
from .models import EPGSource, EPGData
|
||||
from .tasks import refresh_epg_data, delete_epg_refresh_task_by_id
|
||||
from django_celery_beat.models import PeriodicTask, IntervalSchedule
|
||||
from core.utils import is_protected_path, send_websocket_update
|
||||
import json
|
||||
import logging
|
||||
import os
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
@receiver(post_save, sender=EPGSource)
|
||||
def trigger_refresh_on_new_epg_source(sender, instance, created, **kwargs):
|
||||
# Trigger refresh only if the source is newly created and active
|
||||
if created and instance.is_active:
|
||||
# Trigger refresh only if the source is newly created, active, and not a dummy EPG
|
||||
if created and instance.is_active and instance.source_type != 'dummy':
|
||||
refresh_epg_data.delay(instance.id)
|
||||
|
||||
@receiver(post_save, sender=EPGSource)
|
||||
def create_dummy_epg_data(sender, instance, created, **kwargs):
|
||||
"""
|
||||
Automatically create EPGData for dummy EPG sources when they are created.
|
||||
This allows channels to be assigned to dummy EPGs immediately without
|
||||
requiring a refresh first.
|
||||
"""
|
||||
if instance.source_type == 'dummy':
|
||||
# Ensure dummy EPGs always have idle status and no status message
|
||||
if instance.status != EPGSource.STATUS_IDLE or instance.last_message:
|
||||
instance.status = EPGSource.STATUS_IDLE
|
||||
instance.last_message = None
|
||||
instance.save(update_fields=['status', 'last_message'])
|
||||
|
||||
# Create a URL-friendly tvg_id from the dummy EPG name
|
||||
# Replace spaces and special characters with underscores
|
||||
friendly_tvg_id = instance.name.replace(' ', '_').replace('-', '_')
|
||||
# Remove any characters that aren't alphanumeric or underscores
|
||||
friendly_tvg_id = ''.join(c for c in friendly_tvg_id if c.isalnum() or c == '_')
|
||||
# Convert to lowercase for consistency
|
||||
friendly_tvg_id = friendly_tvg_id.lower()
|
||||
# Prefix with 'dummy_' to make it clear this is a dummy EPG
|
||||
friendly_tvg_id = f"dummy_{friendly_tvg_id}"
|
||||
|
||||
# Create or update the EPGData record
|
||||
epg_data, data_created = EPGData.objects.get_or_create(
|
||||
tvg_id=friendly_tvg_id,
|
||||
epg_source=instance,
|
||||
defaults={
|
||||
'name': instance.name,
|
||||
'icon_url': None
|
||||
}
|
||||
)
|
||||
|
||||
# Update name if it changed and record already existed
|
||||
if not data_created and epg_data.name != instance.name:
|
||||
epg_data.name = instance.name
|
||||
epg_data.save(update_fields=['name'])
|
||||
|
||||
if data_created:
|
||||
logger.info(f"Auto-created EPGData for dummy EPG source: {instance.name} (ID: {instance.id})")
|
||||
|
||||
# Send websocket update to notify frontend that EPG data has been created
|
||||
# This allows the channel form to immediately show the new dummy EPG without refreshing
|
||||
send_websocket_update('updates', 'update', {
|
||||
'type': 'epg_data_created',
|
||||
'source_id': instance.id,
|
||||
'source_name': instance.name,
|
||||
'epg_data_id': epg_data.id
|
||||
})
|
||||
else:
|
||||
logger.debug(f"EPGData already exists for dummy EPG source: {instance.name} (ID: {instance.id})")
|
||||
|
||||
@receiver(post_save, sender=EPGSource)
|
||||
def create_or_update_refresh_task(sender, instance, **kwargs):
|
||||
"""
|
||||
Create or update a Celery Beat periodic task when an EPGSource is created/updated.
|
||||
Skip creating tasks for dummy EPG sources as they don't need refreshing.
|
||||
"""
|
||||
# Skip task creation for dummy EPGs
|
||||
if instance.source_type == 'dummy':
|
||||
# If there's an existing task, disable it
|
||||
if instance.refresh_task:
|
||||
instance.refresh_task.enabled = False
|
||||
instance.refresh_task.save(update_fields=['enabled'])
|
||||
return
|
||||
|
||||
task_name = f"epg_source-refresh-{instance.id}"
|
||||
interval, _ = IntervalSchedule.objects.get_or_create(
|
||||
every=int(instance.refresh_interval),
|
||||
|
|
@ -26,7 +93,7 @@ def create_or_update_refresh_task(sender, instance, **kwargs):
|
|||
"interval": interval,
|
||||
"task": "apps.epg.tasks.refresh_epg_data",
|
||||
"kwargs": json.dumps({"source_id": instance.id}),
|
||||
"enabled": instance.refresh_interval != 0,
|
||||
"enabled": instance.refresh_interval != 0 and instance.is_active,
|
||||
})
|
||||
|
||||
update_fields = []
|
||||
|
|
@ -36,8 +103,11 @@ def create_or_update_refresh_task(sender, instance, **kwargs):
|
|||
if task.interval != interval:
|
||||
task.interval = interval
|
||||
update_fields.append("interval")
|
||||
if task.enabled != (instance.refresh_interval != 0):
|
||||
task.enabled = instance.refresh_interval != 0
|
||||
|
||||
# Check both refresh_interval and is_active to determine if task should be enabled
|
||||
should_be_enabled = instance.refresh_interval != 0 and instance.is_active
|
||||
if task.enabled != should_be_enabled:
|
||||
task.enabled = should_be_enabled
|
||||
update_fields.append("enabled")
|
||||
|
||||
if update_fields:
|
||||
|
|
@ -45,12 +115,82 @@ def create_or_update_refresh_task(sender, instance, **kwargs):
|
|||
|
||||
if instance.refresh_task != task:
|
||||
instance.refresh_task = task
|
||||
instance.save(update_fields=update_fields)
|
||||
instance.save(update_fields=["refresh_task"]) # Fixed field name
|
||||
|
||||
@receiver(post_delete, sender=EPGSource)
|
||||
def delete_refresh_task(sender, instance, **kwargs):
|
||||
"""
|
||||
Delete the associated Celery Beat periodic task when a Channel is deleted.
|
||||
Delete the associated Celery Beat periodic task when an EPGSource is deleted.
|
||||
"""
|
||||
if instance.refresh_task:
|
||||
instance.refresh_task.delete()
|
||||
try:
|
||||
# First try the foreign key relationship to find the task ID
|
||||
task = None
|
||||
if instance.refresh_task:
|
||||
logger.info(f"Found task via foreign key: {instance.refresh_task.id} for EPGSource {instance.id}")
|
||||
task = instance.refresh_task
|
||||
|
||||
# Store task ID before deletion if we need to bypass the helper function
|
||||
if task:
|
||||
delete_epg_refresh_task_by_id(instance.id)
|
||||
else:
|
||||
# Otherwise use the helper function
|
||||
delete_epg_refresh_task_by_id(instance.id)
|
||||
except Exception as e:
|
||||
logger.error(f"Error in delete_refresh_task signal handler: {str(e)}", exc_info=True)
|
||||
|
||||
@receiver(pre_save, sender=EPGSource)
|
||||
def update_status_on_active_change(sender, instance, **kwargs):
|
||||
"""
|
||||
When an EPGSource's is_active field changes, update the status accordingly.
|
||||
For dummy EPGs, always ensure status is idle and no status message.
|
||||
"""
|
||||
# Dummy EPGs should always be idle with no status message
|
||||
if instance.source_type == 'dummy':
|
||||
instance.status = EPGSource.STATUS_IDLE
|
||||
instance.last_message = None
|
||||
return
|
||||
|
||||
if instance.pk: # Only for existing records, not new ones
|
||||
try:
|
||||
# Get the current record from the database
|
||||
old_instance = EPGSource.objects.get(pk=instance.pk)
|
||||
|
||||
# If is_active changed, update the status
|
||||
if old_instance.is_active != instance.is_active:
|
||||
if instance.is_active:
|
||||
# When activating, set status to idle
|
||||
instance.status = 'idle'
|
||||
else:
|
||||
# When deactivating, set status to disabled
|
||||
instance.status = 'disabled'
|
||||
except EPGSource.DoesNotExist:
|
||||
# New record, will use default status
|
||||
pass
|
||||
|
||||
@receiver(post_delete, sender=EPGSource)
|
||||
def delete_cached_files(sender, instance, **kwargs):
|
||||
"""
|
||||
Delete cached files associated with an EPGSource when it's deleted.
|
||||
Only deletes files that aren't in protected directories.
|
||||
"""
|
||||
# Check and delete the main file path if not protected
|
||||
if instance.file_path and os.path.exists(instance.file_path):
|
||||
if is_protected_path(instance.file_path):
|
||||
logger.info(f"Skipping deletion of protected file: {instance.file_path}")
|
||||
else:
|
||||
try:
|
||||
os.remove(instance.file_path)
|
||||
logger.info(f"Deleted cached file: {instance.file_path}")
|
||||
except OSError as e:
|
||||
logger.error(f"Error deleting cached file {instance.file_path}: {e}")
|
||||
|
||||
# Check and delete the extracted file path if it exists, is different from main path, and not protected
|
||||
if instance.extracted_file_path and os.path.exists(instance.extracted_file_path) and instance.extracted_file_path != instance.file_path:
|
||||
if is_protected_path(instance.extracted_file_path):
|
||||
logger.info(f"Skipping deletion of protected extracted file: {instance.extracted_file_path}")
|
||||
else:
|
||||
try:
|
||||
os.remove(instance.extracted_file_path)
|
||||
logger.info(f"Deleted extracted file: {instance.extracted_file_path}")
|
||||
except OSError as e:
|
||||
logger.error(f"Error deleting extracted file {instance.extracted_file_path}: {e}")
|
||||
|
|
|
|||
2194
apps/epg/tasks.py
File diff suppressed because it is too large
|
|
@ -1,7 +1,7 @@
|
|||
from rest_framework import viewsets, status
|
||||
from rest_framework.response import Response
|
||||
from rest_framework.views import APIView
|
||||
from rest_framework.permissions import IsAuthenticated
|
||||
from apps.accounts.permissions import Authenticated, permission_classes_by_action
|
||||
from django.http import JsonResponse, HttpResponseForbidden, HttpResponse
|
||||
import logging
|
||||
from drf_yasg.utils import swagger_auto_schema
|
||||
|
|
@ -17,22 +17,30 @@ from django.views import View
|
|||
from django.utils.decorators import method_decorator
|
||||
from django.contrib.auth.decorators import login_required
|
||||
from django.views.decorators.csrf import csrf_exempt
|
||||
from apps.m3u.models import M3UAccountProfile
|
||||
|
||||
# Configure logger
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
@login_required
|
||||
def hdhr_dashboard_view(request):
|
||||
"""Render the HDHR management page."""
|
||||
hdhr_devices = HDHRDevice.objects.all()
|
||||
return render(request, "hdhr/hdhr.html", {"hdhr_devices": hdhr_devices})
|
||||
|
||||
|
||||
# 🔹 1) HDHomeRun Device API
|
||||
class HDHRDeviceViewSet(viewsets.ModelViewSet):
|
||||
"""Handles CRUD operations for HDHomeRun devices"""
|
||||
|
||||
queryset = HDHRDevice.objects.all()
|
||||
serializer_class = HDHRDeviceSerializer
|
||||
permission_classes = [IsAuthenticated]
|
||||
|
||||
def get_permissions(self):
|
||||
try:
|
||||
return [perm() for perm in permission_classes_by_action[self.action]]
|
||||
except KeyError:
|
||||
return [Authenticated()]
|
||||
|
||||
|
||||
# 🔹 2) Discover API
|
||||
|
|
@ -41,56 +49,33 @@ class DiscoverAPIView(APIView):
|
|||
|
||||
@swagger_auto_schema(
|
||||
operation_description="Retrieve HDHomeRun device discovery information",
|
||||
responses={200: openapi.Response("HDHR Discovery JSON")}
|
||||
responses={200: openapi.Response("HDHR Discovery JSON")},
|
||||
)
|
||||
def get(self, request, profile=None):
|
||||
uri_parts = ["hdhr"]
|
||||
if profile is not None:
|
||||
uri_parts.append(profile)
|
||||
|
||||
base_url = request.build_absolute_uri(f'/{"/".join(uri_parts)}/').rstrip('/')
|
||||
base_url = request.build_absolute_uri(f'/{"/".join(uri_parts)}/').rstrip("/")
|
||||
device = HDHRDevice.objects.first()
|
||||
|
||||
# Calculate tuner count from active profiles from active M3U accounts (excluding default "custom Default" profile)
|
||||
profiles = M3UAccountProfile.objects.filter(
|
||||
is_active=True,
|
||||
m3u_account__is_active=True # Only include profiles from enabled M3U accounts
|
||||
).exclude(id=1)
|
||||
|
||||
# 1. Check if any profile has unlimited streams (max_streams=0)
|
||||
has_unlimited = profiles.filter(max_streams=0).exists()
|
||||
|
||||
# 2. Calculate tuner count from limited profiles
|
||||
limited_tuners = 0
|
||||
if not has_unlimited:
|
||||
limited_tuners = profiles.filter(max_streams__gt=0).aggregate(
|
||||
total=models.Sum('max_streams')
|
||||
).get('total', 0) or 0
|
||||
|
||||
# 3. Add custom stream count to tuner count
|
||||
custom_stream_count = Stream.objects.filter(is_custom=True).count()
|
||||
logger.debug(f"Found {custom_stream_count} custom streams")
|
||||
|
||||
# 4. Calculate final tuner count
|
||||
if has_unlimited:
|
||||
# If there are unlimited profiles, start with 10 plus custom streams
|
||||
tuner_count = 10 + custom_stream_count
|
||||
else:
|
||||
# Otherwise use the limited profile sum plus custom streams
|
||||
tuner_count = limited_tuners + custom_stream_count
|
||||
|
||||
# 5. Ensure minimum of 2 tuners
|
||||
tuner_count = max(2, tuner_count)
|
||||
|
||||
logger.debug(f"Calculated tuner count: {tuner_count} (limited profiles: {limited_tuners}, custom streams: {custom_stream_count}, unlimited: {has_unlimited})")
|
||||
# Calculate tuner count using centralized function
|
||||
from apps.m3u.utils import calculate_tuner_count
|
||||
tuner_count = calculate_tuner_count(minimum=1, unlimited_default=10)
|
||||
|
||||
# Create a unique DeviceID for the HDHomeRun device based on profile ID or a default value
|
||||
device_ID = "12345678" # Default DeviceID
|
||||
friendly_name = "Dispatcharr HDHomeRun"
|
||||
if profile is not None:
|
||||
device_ID = f"dispatcharr-hdhr-{profile}"
|
||||
friendly_name = f"Dispatcharr HDHomeRun - {profile}"
|
||||
if not device:
|
||||
data = {
|
||||
"FriendlyName": "Dispatcharr HDHomeRun",
|
||||
"FriendlyName": friendly_name,
|
||||
"ModelNumber": "HDTC-2US",
|
||||
"FirmwareName": "hdhomerun3_atsc",
|
||||
"FirmwareVersion": "20200101",
|
||||
"DeviceID": "12345678",
|
||||
"DeviceID": device_ID,
|
||||
"DeviceAuth": "test_auth_token",
|
||||
"BaseURL": base_url,
|
||||
"LineupURL": f"{base_url}/lineup.json",
|
||||
|
|
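calculate_tuner_count itself lives in apps.m3u.utils and is not part of this diff; a hypothetical sketch consistent with the inline logic it replaced above (sum of limited profile streams plus custom streams, a fixed default when any profile is unlimited, and a minimum floor) might look like this. The Stream import path is assumed, since it is not shown here:

from django.db import models

from apps.channels.models import Stream  # assumed import path
from apps.m3u.models import M3UAccountProfile


def calculate_tuner_count(minimum=2, unlimited_default=10):
    # Hypothetical reimplementation; the real helper may differ
    profiles = M3UAccountProfile.objects.filter(
        is_active=True, m3u_account__is_active=True
    ).exclude(id=1)
    custom_streams = Stream.objects.filter(is_custom=True).count()
    if profiles.filter(max_streams=0).exists():
        return max(minimum, unlimited_default + custom_streams)
    limited = profiles.filter(max_streams__gt=0).aggregate(
        total=models.Sum('max_streams')
    ).get('total') or 0
    return max(minimum, limited + custom_streams)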
@ -117,28 +102,38 @@ class LineupAPIView(APIView):
|
|||
|
||||
@swagger_auto_schema(
|
||||
operation_description="Retrieve the available channel lineup",
|
||||
responses={200: openapi.Response("Channel Lineup JSON")}
|
||||
responses={200: openapi.Response("Channel Lineup JSON")},
|
||||
)
|
||||
def get(self, request, profile=None):
|
||||
if profile is not None:
|
||||
channel_profile = ChannelProfile.objects.get(name=profile)
|
||||
channels = Channel.objects.filter(
|
||||
channelprofilemembership__channel_profile=channel_profile,
|
||||
channelprofilemembership__enabled=True
|
||||
).order_by('channel_number')
|
||||
channelprofilemembership__enabled=True,
|
||||
).order_by("channel_number")
|
||||
else:
|
||||
channels = Channel.objects.all().order_by('channel_number')
|
||||
channels = Channel.objects.all().order_by("channel_number")
|
||||
|
||||
lineup = [
|
||||
{
|
||||
"GuideNumber": str(ch.channel_number),
|
||||
"GuideName": ch.name,
|
||||
"URL": request.build_absolute_uri(f"/proxy/ts/stream/{ch.uuid}"),
|
||||
"Guide_ID": str(ch.channel_number),
|
||||
"Station": str(ch.channel_number),
|
||||
}
|
||||
for ch in channels
|
||||
]
|
||||
lineup = []
|
||||
for ch in channels:
|
||||
# Format channel number as integer if it has no decimal component
|
||||
if ch.channel_number is not None:
|
||||
if ch.channel_number == int(ch.channel_number):
|
||||
formatted_channel_number = str(int(ch.channel_number))
|
||||
else:
|
||||
formatted_channel_number = str(ch.channel_number)
|
||||
else:
|
||||
formatted_channel_number = ""
|
||||
|
||||
lineup.append(
|
||||
{
|
||||
"GuideNumber": formatted_channel_number,
|
||||
"GuideName": ch.name,
|
||||
"URL": request.build_absolute_uri(f"/proxy/ts/stream/{ch.uuid}"),
|
||||
"Guide_ID": formatted_channel_number,
|
||||
"Station": formatted_channel_number,
|
||||
}
|
||||
)
|
||||
return JsonResponse(lineup, safe=False)
|
||||
|
||||
|
||||
|
|
@ -148,14 +143,14 @@ class LineupStatusAPIView(APIView):
|
|||
|
||||
@swagger_auto_schema(
|
||||
operation_description="Retrieve the HDHomeRun lineup status",
|
||||
responses={200: openapi.Response("Lineup Status JSON")}
|
||||
responses={200: openapi.Response("Lineup Status JSON")},
|
||||
)
|
||||
def get(self, request, profile=None):
|
||||
data = {
|
||||
"ScanInProgress": 0,
|
||||
"ScanPossible": 0,
|
||||
"Source": "Cable",
|
||||
"SourceList": ["Cable"]
|
||||
"SourceList": ["Cable"],
|
||||
}
|
||||
return JsonResponse(data)
|
||||
|
||||
|
|
@ -166,10 +161,10 @@ class HDHRDeviceXMLAPIView(APIView):
|
|||
|
||||
@swagger_auto_schema(
|
||||
operation_description="Retrieve the HDHomeRun device XML configuration",
|
||||
responses={200: openapi.Response("HDHR Device XML")}
|
||||
responses={200: openapi.Response("HDHR Device XML")},
|
||||
)
|
||||
def get(self, request):
|
||||
base_url = request.build_absolute_uri('/hdhr/').rstrip('/')
|
||||
base_url = request.build_absolute_uri("/hdhr/").rstrip("/")
|
||||
|
||||
xml_response = f"""<?xml version="1.0" encoding="utf-8"?>
|
||||
<root>
|
||||
|
|
|
|||
|
|
@ -2,6 +2,7 @@ import os
|
|||
import socket
|
||||
import threading
|
||||
import time
|
||||
import gevent # Add this import
|
||||
from django.conf import settings
|
||||
|
||||
# SSDP Multicast Address and Port
|
||||
|
|
@ -59,7 +60,7 @@ def ssdp_broadcaster(host_ip):
|
|||
sock.setsockopt(socket.IPPROTO_IP, socket.IP_MULTICAST_TTL, 2)
|
||||
while True:
|
||||
sock.sendto(notify.encode("utf-8"), (SSDP_MULTICAST, SSDP_PORT))
|
||||
time.sleep(30)
|
||||
gevent.sleep(30) # Replace time.sleep with gevent.sleep
|
||||
|
||||
def start_ssdp():
|
||||
host_ip = get_host_ip()
|
||||
|
|
|
|||
|
|
@ -1,7 +1,7 @@
|
|||
from rest_framework import viewsets, status
|
||||
from rest_framework.response import Response
|
||||
from rest_framework.views import APIView
|
||||
from rest_framework.permissions import IsAuthenticated
|
||||
from apps.accounts.permissions import Authenticated, permission_classes_by_action
|
||||
from django.http import JsonResponse, HttpResponseForbidden, HttpResponse
|
||||
from drf_yasg.utils import swagger_auto_schema
|
||||
from drf_yasg import openapi
|
||||
|
|
@@ -16,18 +16,26 @@ from django.utils.decorators import method_decorator
 from django.contrib.auth.decorators import login_required
 from django.views.decorators.csrf import csrf_exempt


 @login_required
 def hdhr_dashboard_view(request):
     """Render the HDHR management page."""
     hdhr_devices = HDHRDevice.objects.all()
     return render(request, "hdhr/hdhr.html", {"hdhr_devices": hdhr_devices})


 # 🔹 1) HDHomeRun Device API
 class HDHRDeviceViewSet(viewsets.ModelViewSet):
     """Handles CRUD operations for HDHomeRun devices"""

     queryset = HDHRDevice.objects.all()
     serializer_class = HDHRDeviceSerializer
     permission_classes = [IsAuthenticated]

+    def get_permissions(self):
+        try:
+            return [perm() for perm in permission_classes_by_action[self.action]]
+        except KeyError:
+            return [Authenticated()]


 # 🔹 2) Discover API
|
||||
|
|
@@ -36,10 +44,10 @@ class DiscoverAPIView(APIView):

     @swagger_auto_schema(
         operation_description="Retrieve HDHomeRun device discovery information",
-        responses={200: openapi.Response("HDHR Discovery JSON")}
+        responses={200: openapi.Response("HDHR Discovery JSON")},
     )
     def get(self, request):
-        base_url = request.build_absolute_uri('/hdhr/').rstrip('/')
+        base_url = request.build_absolute_uri("/hdhr/").rstrip("/")
         device = HDHRDevice.objects.first()

         if not device:
|
||||
|
|
@ -75,15 +83,15 @@ class LineupAPIView(APIView):
|
|||
|
||||
@swagger_auto_schema(
|
||||
operation_description="Retrieve the available channel lineup",
|
||||
responses={200: openapi.Response("Channel Lineup JSON")}
|
||||
responses={200: openapi.Response("Channel Lineup JSON")},
|
||||
)
|
||||
def get(self, request):
|
||||
channels = Channel.objects.all().order_by('channel_number')
|
||||
channels = Channel.objects.all().order_by("channel_number")
|
||||
lineup = [
|
||||
{
|
||||
"GuideNumber": str(ch.channel_number),
|
||||
"GuideName": ch.name,
|
||||
"URL": request.build_absolute_uri(f"/proxy/ts/stream/{ch.uuid}")
|
||||
"URL": request.build_absolute_uri(f"/proxy/ts/stream/{ch.uuid}"),
|
||||
}
|
||||
for ch in channels
|
||||
]
|
||||
|
|
@ -96,14 +104,14 @@ class LineupStatusAPIView(APIView):
|
|||
|
||||
@swagger_auto_schema(
|
||||
operation_description="Retrieve the HDHomeRun lineup status",
|
||||
responses={200: openapi.Response("Lineup Status JSON")}
|
||||
responses={200: openapi.Response("Lineup Status JSON")},
|
||||
)
|
||||
def get(self, request):
|
||||
data = {
|
||||
"ScanInProgress": 0,
|
||||
"ScanPossible": 0,
|
||||
"Source": "Cable",
|
||||
"SourceList": ["Cable"]
|
||||
"SourceList": ["Cable"],
|
||||
}
|
||||
return JsonResponse(data)
|
||||
|
||||
|
|
@ -114,10 +122,10 @@ class HDHRDeviceXMLAPIView(APIView):
|
|||
|
||||
@swagger_auto_schema(
|
||||
operation_description="Retrieve the HDHomeRun device XML configuration",
|
||||
responses={200: openapi.Response("HDHR Device XML")}
|
||||
responses={200: openapi.Response("HDHR Device XML")},
|
||||
)
|
||||
def get(self, request):
|
||||
base_url = request.build_absolute_uri('/hdhr/').rstrip('/')
|
||||
base_url = request.build_absolute_uri("/hdhr/").rstrip("/")
|
||||
|
||||
xml_response = f"""<?xml version="1.0" encoding="utf-8"?>
|
||||
<root>
|
||||
|
|
|
|||
|
|
@@ -1,6 +1,8 @@
 from django.contrib import admin
 from django.utils.html import format_html
-from .models import M3UAccount, M3UFilter, ServerGroup, UserAgent
+from .models import M3UAccount, M3UFilter, ServerGroup, UserAgent, M3UAccountProfile
+import json


 class M3UFilterInline(admin.TabularInline):
     model = M3UFilter
|
||||
|
|
@ -8,50 +10,181 @@ class M3UFilterInline(admin.TabularInline):
|
|||
verbose_name = "M3U Filter"
|
||||
verbose_name_plural = "M3U Filters"
|
||||
|
||||
|
||||
@admin.register(M3UAccount)
|
||||
class M3UAccountAdmin(admin.ModelAdmin):
|
||||
list_display = ('name', 'server_url', 'server_group', 'max_streams', 'is_active', 'user_agent_display', 'uploaded_file_link', 'created_at', 'updated_at')
|
||||
list_filter = ('is_active', 'server_group')
|
||||
search_fields = ('name', 'server_url', 'server_group__name')
|
||||
list_display = (
|
||||
"name",
|
||||
"server_url",
|
||||
"server_group",
|
||||
"max_streams",
|
||||
"priority",
|
||||
"is_active",
|
||||
"user_agent_display",
|
||||
"uploaded_file_link",
|
||||
"created_at",
|
||||
"updated_at",
|
||||
)
|
||||
list_filter = ("is_active", "server_group")
|
||||
search_fields = ("name", "server_url", "server_group__name")
|
||||
inlines = [M3UFilterInline]
|
||||
actions = ['activate_accounts', 'deactivate_accounts']
|
||||
actions = ["activate_accounts", "deactivate_accounts"]
|
||||
|
||||
# Handle both ForeignKey and ManyToManyField cases for UserAgent
|
||||
def user_agent_display(self, obj):
|
||||
if hasattr(obj, 'user_agent'): # ForeignKey case
|
||||
if hasattr(obj, "user_agent"): # ForeignKey case
|
||||
return obj.user_agent.user_agent if obj.user_agent else "None"
|
||||
elif hasattr(obj, 'user_agents'): # ManyToManyField case
|
||||
elif hasattr(obj, "user_agents"): # ManyToManyField case
|
||||
return ", ".join([ua.user_agent for ua in obj.user_agents.all()]) or "None"
|
||||
return "None"
|
||||
|
||||
user_agent_display.short_description = "User Agent(s)"
|
||||
|
||||
def vod_enabled_display(self, obj):
|
||||
"""Display whether VOD is enabled for this account"""
|
||||
if obj.custom_properties:
|
||||
custom_props = obj.custom_properties or {}
|
||||
return "Yes" if custom_props.get('enable_vod', False) else "No"
|
||||
return "No"
|
||||
vod_enabled_display.short_description = "VOD Enabled"
|
||||
vod_enabled_display.boolean = True
|
||||
|
||||
def uploaded_file_link(self, obj):
|
||||
if obj.uploaded_file:
|
||||
return format_html("<a href='{}' target='_blank'>Download M3U</a>", obj.uploaded_file.url)
|
||||
return format_html(
|
||||
"<a href='{}' target='_blank'>Download M3U</a>", obj.uploaded_file.url
|
||||
)
|
||||
return "No file uploaded"
|
||||
|
||||
uploaded_file_link.short_description = "Uploaded File"
|
||||
|
||||
@admin.action(description='Activate selected accounts')
|
||||
@admin.action(description="Activate selected accounts")
|
||||
def activate_accounts(self, request, queryset):
|
||||
queryset.update(is_active=True)
|
||||
|
||||
@admin.action(description='Deactivate selected accounts')
|
||||
@admin.action(description="Deactivate selected accounts")
|
||||
def deactivate_accounts(self, request, queryset):
|
||||
queryset.update(is_active=False)
|
||||
|
||||
# Add ManyToManyField for Django Admin (if applicable)
|
||||
if hasattr(M3UAccount, 'user_agents'):
|
||||
filter_horizontal = ('user_agents',) # Only for ManyToManyField
|
||||
if hasattr(M3UAccount, "user_agents"):
|
||||
filter_horizontal = ("user_agents",) # Only for ManyToManyField
|
||||
|
||||
|
||||
@admin.register(M3UFilter)
|
||||
class M3UFilterAdmin(admin.ModelAdmin):
|
||||
list_display = ('m3u_account', 'filter_type', 'regex_pattern', 'exclude')
|
||||
list_filter = ('filter_type', 'exclude')
|
||||
search_fields = ('regex_pattern',)
|
||||
ordering = ('m3u_account',)
|
||||
list_display = ("m3u_account", "filter_type", "regex_pattern", "exclude")
|
||||
list_filter = ("filter_type", "exclude")
|
||||
search_fields = ("regex_pattern",)
|
||||
ordering = ("m3u_account",)
|
||||
|
||||
|
||||
@admin.register(ServerGroup)
|
||||
class ServerGroupAdmin(admin.ModelAdmin):
|
||||
list_display = ('name',)
|
||||
search_fields = ('name',)
|
||||
list_display = ("name",)
|
||||
search_fields = ("name",)
|
||||
|
||||
|
||||
@admin.register(M3UAccountProfile)
|
||||
class M3UAccountProfileAdmin(admin.ModelAdmin):
|
||||
list_display = (
|
||||
"name",
|
||||
"m3u_account",
|
||||
"is_default",
|
||||
"is_active",
|
||||
"max_streams",
|
||||
"current_viewers",
|
||||
"account_status_display",
|
||||
"account_expiration_display",
|
||||
"last_refresh_display",
|
||||
)
|
||||
list_filter = ("is_active", "is_default", "m3u_account__account_type")
|
||||
search_fields = ("name", "m3u_account__name")
|
||||
readonly_fields = ("account_info_display",)
|
||||
|
||||
def account_status_display(self, obj):
|
||||
"""Display account status from custom properties"""
|
||||
status = obj.get_account_status()
|
||||
if status:
|
||||
# Create colored status display
|
||||
color_map = {
|
||||
'Active': 'green',
|
||||
'Expired': 'red',
|
||||
'Disabled': 'red',
|
||||
'Banned': 'red',
|
||||
}
|
||||
color = color_map.get(status, 'black')
|
||||
return format_html(
|
||||
'<span style="color: {};">{}</span>',
|
||||
color,
|
||||
status
|
||||
)
|
||||
return "Unknown"
|
||||
account_status_display.short_description = "Account Status"
|
||||
|
||||
def account_expiration_display(self, obj):
|
||||
"""Display account expiration from custom properties"""
|
||||
expiration = obj.get_account_expiration()
|
||||
if expiration:
|
||||
from datetime import datetime
|
||||
if expiration < datetime.now():
|
||||
return format_html(
|
||||
'<span style="color: red;">{}</span>',
|
||||
expiration.strftime('%Y-%m-%d %H:%M')
|
||||
)
|
||||
else:
|
||||
return format_html(
|
||||
'<span style="color: green;">{}</span>',
|
||||
expiration.strftime('%Y-%m-%d %H:%M')
|
||||
)
|
||||
return "Unknown"
|
||||
account_expiration_display.short_description = "Expires"
|
||||
|
||||
def last_refresh_display(self, obj):
|
||||
"""Display last refresh time from custom properties"""
|
||||
last_refresh = obj.get_last_refresh()
|
||||
if last_refresh:
|
||||
return last_refresh.strftime('%Y-%m-%d %H:%M:%S')
|
||||
return "Never"
|
||||
last_refresh_display.short_description = "Last Refresh"
|
||||
|
||||
def account_info_display(self, obj):
|
||||
"""Display formatted account information from custom properties"""
|
||||
if not obj.custom_properties:
|
||||
return "No account information available"
|
||||
|
||||
html_parts = []
|
||||
|
||||
# User Info
|
||||
user_info = obj.custom_properties.get('user_info', {})
|
||||
if user_info:
|
||||
html_parts.append("<h3>User Information:</h3>")
|
||||
html_parts.append("<ul>")
|
||||
for key, value in user_info.items():
|
||||
if key == 'exp_date' and value:
|
||||
try:
|
||||
from datetime import datetime
|
||||
exp_date = datetime.fromtimestamp(float(value))
|
||||
value = exp_date.strftime('%Y-%m-%d %H:%M:%S')
|
||||
except (ValueError, TypeError):
|
||||
pass
|
||||
html_parts.append(f"<li><strong>{key}:</strong> {value}</li>")
|
||||
html_parts.append("</ul>")
|
||||
|
||||
# Server Info
|
||||
server_info = obj.custom_properties.get('server_info', {})
|
||||
if server_info:
|
||||
html_parts.append("<h3>Server Information:</h3>")
|
||||
html_parts.append("<ul>")
|
||||
for key, value in server_info.items():
|
||||
html_parts.append(f"<li><strong>{key}:</strong> {value}</li>")
|
||||
html_parts.append("</ul>")
|
||||
|
||||
# Last Refresh
|
||||
last_refresh = obj.custom_properties.get('last_refresh')
|
||||
if last_refresh:
|
||||
html_parts.append(f"<p><strong>Last Refresh:</strong> {last_refresh}</p>")
|
||||
|
||||
return format_html(''.join(html_parts)) if html_parts else "No account information available"
|
||||
|
||||
account_info_display.short_description = "Account Information"
|
||||
|
|
|
|||
|
|
@@ -1,18 +1,44 @@
 from django.urls import path, include
 from rest_framework.routers import DefaultRouter
-from .api_views import M3UAccountViewSet, M3UFilterViewSet, ServerGroupViewSet, RefreshM3UAPIView, RefreshSingleM3UAPIView, UserAgentViewSet, M3UAccountProfileViewSet
+from .api_views import (
+    M3UAccountViewSet,
+    M3UFilterViewSet,
+    ServerGroupViewSet,
+    RefreshM3UAPIView,
+    RefreshSingleM3UAPIView,
+    RefreshAccountInfoAPIView,
+    UserAgentViewSet,
+    M3UAccountProfileViewSet,
+)

-app_name = 'm3u'
+app_name = "m3u"

 router = DefaultRouter()
-router.register(r'accounts', M3UAccountViewSet, basename='m3u-account')
-router.register(r'accounts\/(?P<account_id>\d+)\/profiles', M3UAccountProfileViewSet, basename='m3u-account-profiles')
-router.register(r'filters', M3UFilterViewSet, basename='m3u-filter')
-router.register(r'server-groups', ServerGroupViewSet, basename='server-group')
+router.register(r"accounts", M3UAccountViewSet, basename="m3u-account")
+router.register(
+    r"accounts\/(?P<account_id>\d+)\/profiles",
+    M3UAccountProfileViewSet,
+    basename="m3u-account-profiles",
+)
+router.register(
+    r"accounts\/(?P<account_id>\d+)\/filters",
+    M3UFilterViewSet,
+    basename="m3u-filters",
+)
+router.register(r"server-groups", ServerGroupViewSet, basename="server-group")

 urlpatterns = [
-    path('refresh/', RefreshM3UAPIView.as_view(), name='m3u_refresh'),
-    path('refresh/<int:account_id>/', RefreshSingleM3UAPIView.as_view(), name='m3u_refresh_single'),
+    path("refresh/", RefreshM3UAPIView.as_view(), name="m3u_refresh"),
+    path(
+        "refresh/<int:account_id>/",
+        RefreshSingleM3UAPIView.as_view(),
+        name="m3u_refresh_single",
+    ),
+    path(
+        "refresh-account-info/<int:profile_id>/",
+        RefreshAccountInfoAPIView.as_view(),
+        name="m3u_refresh_account_info",
+    ),
 ]

 urlpatterns += router.urls
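With the registrations above, profile and filter routes are nested under their parent account. Assuming the m3u app is mounted at /api/m3u/ (the mount point is not shown in this hunk), the resulting endpoints would look roughly like this:

# Hypothetical URL shapes, given an /api/m3u/ prefix:
#   POST /api/m3u/refresh/                     -> RefreshM3UAPIView
#   POST /api/m3u/refresh/5/                   -> RefreshSingleM3UAPIView (account 5)
#   POST /api/m3u/refresh-account-info/3/      -> RefreshAccountInfoAPIView (profile 3)
#   GET  /api/m3u/accounts/5/profiles/         -> M3UAccountProfileViewSet (list)
#   GET  /api/m3u/accounts/5/filters/          -> M3UFilterViewSet (list)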
|
||||
|
|
|
|||
|
|
@ -1,7 +1,11 @@
|
|||
from rest_framework import viewsets, status
|
||||
from rest_framework.response import Response
|
||||
from rest_framework.views import APIView
|
||||
from rest_framework.permissions import IsAuthenticated
|
||||
from apps.accounts.permissions import (
|
||||
Authenticated,
|
||||
permission_classes_by_action,
|
||||
permission_classes_by_method,
|
||||
)
|
||||
from drf_yasg.utils import swagger_auto_schema
|
||||
from drf_yasg import openapi
|
||||
from django.shortcuts import get_object_or_404
|
||||
|
|
@ -10,13 +14,15 @@ from django.core.cache import cache
|
|||
import os
|
||||
from rest_framework.decorators import action
|
||||
from django.conf import settings
|
||||
from .tasks import refresh_m3u_groups
|
||||
import json
|
||||
|
||||
# Import all models, including UserAgent.
|
||||
from .models import M3UAccount, M3UFilter, ServerGroup, M3UAccountProfile
|
||||
from core.models import UserAgent
|
||||
from apps.channels.models import ChannelGroupM3UAccount
|
||||
from core.serializers import UserAgentSerializer
|
||||
# Import all serializers, including the UserAgentSerializer.
|
||||
from apps.vod.models import M3UVODCategoryRelation
|
||||
|
||||
from .serializers import (
|
||||
M3UAccountSerializer,
|
||||
M3UFilterSerializer,
|
||||
|
|
@ -24,130 +30,455 @@ from .serializers import (
|
|||
M3UAccountProfileSerializer,
|
||||
)
|
||||
|
||||
from .tasks import refresh_single_m3u_account, refresh_m3u_accounts
|
||||
from django.core.files.storage import default_storage
|
||||
from django.core.files.base import ContentFile
|
||||
from .tasks import refresh_single_m3u_account, refresh_m3u_accounts, refresh_account_info
|
||||
import json
|
||||
|
||||
|
||||
class M3UAccountViewSet(viewsets.ModelViewSet):
|
||||
"""Handles CRUD operations for M3U accounts"""
|
||||
queryset = M3UAccount.objects.prefetch_related('channel_group')
|
||||
|
||||
queryset = M3UAccount.objects.prefetch_related("channel_group")
|
||||
serializer_class = M3UAccountSerializer
|
||||
permission_classes = [IsAuthenticated]
|
||||
|
||||
def get_permissions(self):
|
||||
try:
|
||||
return [perm() for perm in permission_classes_by_action[self.action]]
|
||||
except KeyError:
|
||||
return [Authenticated()]
|
||||
|
||||
def create(self, request, *args, **kwargs):
|
||||
# Handle file upload first, if any
|
||||
file_path = None
|
||||
if 'file' in request.FILES:
|
||||
file = request.FILES['file']
|
||||
if "file" in request.FILES:
|
||||
file = request.FILES["file"]
|
||||
file_name = file.name
|
||||
file_path = os.path.join('/data/uploads/m3us', file_name)
|
||||
file_path = os.path.join("/data/uploads/m3us", file_name)
|
||||
|
||||
os.makedirs(os.path.dirname(file_path), exist_ok=True)
|
||||
with open(file_path, 'wb+') as destination:
|
||||
with open(file_path, "wb+") as destination:
|
||||
for chunk in file.chunks():
|
||||
destination.write(chunk)
|
||||
|
||||
# Add file_path to the request data so it's available during creation
|
||||
request.data._mutable = True # Allow modification of the request data
|
||||
request.data['file_path'] = file_path # Include the file path if a file was uploaded
|
||||
request.data.pop('server_url')
|
||||
request.data["file_path"] = (
|
||||
file_path # Include the file path if a file was uploaded
|
||||
)
|
||||
|
||||
# Handle the user_agent field - convert "null" string to None
|
||||
if "user_agent" in request.data and request.data["user_agent"] == "null":
|
||||
request.data["user_agent"] = None
|
||||
|
||||
# Handle server_url appropriately
|
||||
if "server_url" in request.data and not request.data["server_url"]:
|
||||
request.data.pop("server_url")
|
||||
|
||||
request.data._mutable = False # Make the request data immutable again
|
||||
|
||||
# Now call super().create() to create the instance
|
||||
response = super().create(request, *args, **kwargs)
|
||||
|
||||
account_type = response.data.get("account_type")
|
||||
account_id = response.data.get("id")
|
||||
|
||||
# Notify frontend that a new playlist was created
|
||||
from core.utils import send_websocket_update
|
||||
send_websocket_update('updates', 'update', {
|
||||
'type': 'playlist_created',
|
||||
'playlist_id': account_id
|
||||
})
|
||||
|
||||
if account_type == M3UAccount.Types.XC:
|
||||
refresh_m3u_groups(account_id)
|
||||
|
||||
# Check if VOD is enabled
|
||||
enable_vod = request.data.get("enable_vod", False)
|
||||
if enable_vod:
|
||||
from apps.vod.tasks import refresh_categories
|
||||
|
||||
refresh_categories(account_id)
|
||||
|
||||
# After the instance is created, return the response
|
||||
return response
|
||||
|
||||
def update(self, request, *args, **kwargs):
|
||||
instance = self.get_object()
|
||||
old_vod_enabled = False
|
||||
|
||||
# Check current VOD setting
|
||||
if instance.custom_properties:
|
||||
custom_props = instance.custom_properties or {}
|
||||
old_vod_enabled = custom_props.get("enable_vod", False)
|
||||
|
||||
# Handle file upload first, if any
|
||||
file_path = None
|
||||
if 'file' in request.FILES:
|
||||
file = request.FILES['file']
|
||||
if "file" in request.FILES:
|
||||
file = request.FILES["file"]
|
||||
file_name = file.name
|
||||
file_path = os.path.join('/data/uploads/m3us', file_name)
|
||||
file_path = os.path.join("/data/uploads/m3us", file_name)
|
||||
|
||||
os.makedirs(os.path.dirname(file_path), exist_ok=True)
|
||||
with open(file_path, 'wb+') as destination:
|
||||
with open(file_path, "wb+") as destination:
|
||||
for chunk in file.chunks():
|
||||
destination.write(chunk)
|
||||
|
||||
# Add file_path to the request data so it's available during creation
|
||||
request.data._mutable = True # Allow modification of the request data
|
||||
request.data['file_path'] = file_path # Include the file path if a file was uploaded
|
||||
request.data.pop('server_url')
|
||||
request.data["file_path"] = (
|
||||
file_path # Include the file path if a file was uploaded
|
||||
)
|
||||
|
||||
# Handle the user_agent field - convert "null" string to None
|
||||
if "user_agent" in request.data and request.data["user_agent"] == "null":
|
||||
request.data["user_agent"] = None
|
||||
|
||||
# Handle server_url appropriately
|
||||
if "server_url" in request.data and not request.data["server_url"]:
|
||||
request.data.pop("server_url")
|
||||
|
||||
request.data._mutable = False # Make the request data immutable again
|
||||
|
||||
if instance.file_path and os.path.exists(instance.file_path):
|
||||
os.remove(instance.file_path)
|
||||
|
||||
# Now call super().create() to create the instance
|
||||
# Now call super().update() to update the instance
|
||||
response = super().update(request, *args, **kwargs)
|
||||
|
||||
# After the instance is created, return the response
|
||||
# Check if VOD setting changed and trigger refresh if needed
|
||||
new_vod_enabled = request.data.get("enable_vod", old_vod_enabled)
|
||||
|
||||
if (
|
||||
instance.account_type == M3UAccount.Types.XC
|
||||
and not old_vod_enabled
|
||||
and new_vod_enabled
|
||||
):
|
||||
# Create Uncategorized categories immediately so they're available in the UI
|
||||
from apps.vod.models import VODCategory, M3UVODCategoryRelation
|
||||
|
||||
# Create movie Uncategorized category
|
||||
movie_category, _ = VODCategory.objects.get_or_create(
|
||||
name="Uncategorized",
|
||||
category_type="movie",
|
||||
defaults={}
|
||||
)
|
||||
|
||||
# Create series Uncategorized category
|
||||
series_category, _ = VODCategory.objects.get_or_create(
|
||||
name="Uncategorized",
|
||||
category_type="series",
|
||||
defaults={}
|
||||
)
|
||||
|
||||
# Create relations for both categories (disabled by default until first refresh)
|
||||
account_custom_props = instance.custom_properties or {}
|
||||
auto_enable_new = account_custom_props.get("auto_enable_new_groups_vod", True)
|
||||
|
||||
M3UVODCategoryRelation.objects.get_or_create(
|
||||
category=movie_category,
|
||||
m3u_account=instance,
|
||||
defaults={
|
||||
'enabled': auto_enable_new,
|
||||
'custom_properties': {}
|
||||
}
|
||||
)
|
||||
|
||||
M3UVODCategoryRelation.objects.get_or_create(
|
||||
category=series_category,
|
||||
m3u_account=instance,
|
||||
defaults={
|
||||
'enabled': auto_enable_new,
|
||||
'custom_properties': {}
|
||||
}
|
||||
)
|
||||
|
||||
# Trigger full VOD refresh
|
||||
from apps.vod.tasks import refresh_vod_content
|
||||
|
||||
refresh_vod_content.delay(instance.id)
|
||||
|
||||
# After the instance is updated, return the response
|
||||
return response
|
||||
|
||||
def partial_update(self, request, *args, **kwargs):
|
||||
"""Handle partial updates with special logic for is_active field"""
|
||||
instance = self.get_object()
|
||||
|
||||
# Check if we're toggling is_active
|
||||
if (
|
||||
"is_active" in request.data
|
||||
and instance.is_active != request.data["is_active"]
|
||||
):
|
||||
# Set appropriate status based on new is_active value
|
||||
if request.data["is_active"]:
|
||||
request.data["status"] = M3UAccount.Status.IDLE
|
||||
else:
|
||||
request.data["status"] = M3UAccount.Status.DISABLED
|
||||
|
||||
# Continue with regular partial update
|
||||
return super().partial_update(request, *args, **kwargs)
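The override above keeps is_active and status in step: enabling an account resets status to idle, disabling it sets status to disabled. A hedged sketch of the client side of that toggle (host, prefix, and token are invented):

import requests  # any HTTP client works; requests is used here only for illustration

# Disabling account 5; the view will also set its status to "disabled".
requests.patch(
    "http://dispatcharr.local/api/m3u/accounts/5/",  # hypothetical host and prefix
    json={"is_active": False},
    headers={"Authorization": "Bearer <token>"},
)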
|
||||
|
||||
@action(detail=True, methods=["post"], url_path="refresh-vod")
|
||||
def refresh_vod(self, request, pk=None):
|
||||
"""Trigger VOD content refresh for XtreamCodes accounts"""
|
||||
account = self.get_object()
|
||||
|
||||
if account.account_type != M3UAccount.Types.XC:
|
||||
return Response(
|
||||
{"error": "VOD refresh is only available for XtreamCodes accounts"},
|
||||
status=status.HTTP_400_BAD_REQUEST,
|
||||
)
|
||||
|
||||
# Check if VOD is enabled
|
||||
vod_enabled = False
|
||||
if account.custom_properties:
|
||||
custom_props = account.custom_properties or {}
|
||||
vod_enabled = custom_props.get("enable_vod", False)
|
||||
|
||||
if not vod_enabled:
|
||||
return Response(
|
||||
{"error": "VOD is not enabled for this account"},
|
||||
status=status.HTTP_400_BAD_REQUEST,
|
||||
)
|
||||
|
||||
try:
|
||||
from apps.vod.tasks import refresh_vod_content
|
||||
|
||||
refresh_vod_content.delay(account.id)
|
||||
return Response(
|
||||
{"message": f"VOD refresh initiated for account {account.name}"},
|
||||
status=status.HTTP_202_ACCEPTED,
|
||||
)
|
||||
except Exception as e:
|
||||
return Response(
|
||||
{"error": f"Failed to initiate VOD refresh: {str(e)}"},
|
||||
status=status.HTTP_500_INTERNAL_SERVER_ERROR,
|
||||
)
|
||||
|
||||
@action(detail=True, methods=["patch"], url_path="group-settings")
|
||||
def update_group_settings(self, request, pk=None):
|
||||
"""Update auto channel sync settings for M3U account groups"""
|
||||
account = self.get_object()
|
||||
group_settings = request.data.get("group_settings", [])
|
||||
category_settings = request.data.get("category_settings", [])
|
||||
|
||||
try:
|
||||
for setting in group_settings:
|
||||
group_id = setting.get("channel_group")
|
||||
enabled = setting.get("enabled", True)
|
||||
auto_sync = setting.get("auto_channel_sync", False)
|
||||
sync_start = setting.get("auto_sync_channel_start")
|
||||
custom_properties = setting.get("custom_properties", {})
|
||||
|
||||
if group_id:
|
||||
ChannelGroupM3UAccount.objects.update_or_create(
|
||||
channel_group_id=group_id,
|
||||
m3u_account=account,
|
||||
defaults={
|
||||
"enabled": enabled,
|
||||
"auto_channel_sync": auto_sync,
|
||||
"auto_sync_channel_start": sync_start,
|
||||
"custom_properties": custom_properties,
|
||||
},
|
||||
)
|
||||
|
||||
for setting in category_settings:
|
||||
category_id = setting.get("id")
|
||||
enabled = setting.get("enabled", True)
|
||||
custom_properties = setting.get("custom_properties", {})
|
||||
|
||||
if category_id:
|
||||
M3UVODCategoryRelation.objects.update_or_create(
|
||||
category_id=category_id,
|
||||
m3u_account=account,
|
||||
defaults={
|
||||
"enabled": enabled,
|
||||
"custom_properties": custom_properties,
|
||||
},
|
||||
)
|
||||
|
||||
return Response({"message": "Group settings updated successfully"})
|
||||
|
||||
except Exception as e:
|
||||
return Response(
|
||||
{"error": f"Failed to update group settings: {str(e)}"},
|
||||
status=status.HTTP_400_BAD_REQUEST,
|
||||
)
|
||||
|
||||
|
||||
class M3UFilterViewSet(viewsets.ModelViewSet):
|
||||
"""Handles CRUD operations for M3U filters"""
|
||||
queryset = M3UFilter.objects.all()
|
||||
serializer_class = M3UFilterSerializer
|
||||
permission_classes = [IsAuthenticated]
|
||||
|
||||
def get_permissions(self):
|
||||
try:
|
||||
return [perm() for perm in permission_classes_by_action[self.action]]
|
||||
except KeyError:
|
||||
return [Authenticated()]
|
||||
|
||||
def get_queryset(self):
|
||||
m3u_account_id = self.kwargs["account_id"]
|
||||
return M3UFilter.objects.filter(m3u_account_id=m3u_account_id)
|
||||
|
||||
def perform_create(self, serializer):
|
||||
# Get the account ID from the URL
|
||||
account_id = self.kwargs["account_id"]
|
||||
|
||||
# # Get the M3UAccount instance for the account_id
|
||||
# m3u_account = M3UAccount.objects.get(id=account_id)
|
||||
|
||||
# Save the 'm3u_account' in the serializer context
|
||||
serializer.context["m3u_account"] = account_id
|
||||
|
||||
# Perform the actual save
|
||||
serializer.save(m3u_account_id=account_id)
|
||||
|
||||
|
||||
class ServerGroupViewSet(viewsets.ModelViewSet):
|
||||
"""Handles CRUD operations for Server Groups"""
|
||||
|
||||
queryset = ServerGroup.objects.all()
|
||||
serializer_class = ServerGroupSerializer
|
||||
permission_classes = [IsAuthenticated]
|
||||
|
||||
def get_permissions(self):
|
||||
try:
|
||||
return [perm() for perm in permission_classes_by_action[self.action]]
|
||||
except KeyError:
|
||||
return [Authenticated()]
|
||||
|
||||
|
||||
class RefreshM3UAPIView(APIView):
|
||||
"""Triggers refresh for all active M3U accounts"""
|
||||
|
||||
def get_permissions(self):
|
||||
try:
|
||||
return [
|
||||
perm() for perm in permission_classes_by_method[self.request.method]
|
||||
]
|
||||
except KeyError:
|
||||
return [Authenticated()]
|
||||
|
||||
@swagger_auto_schema(
|
||||
operation_description="Triggers a refresh of all active M3U accounts",
|
||||
responses={202: "M3U refresh initiated"}
|
||||
responses={202: "M3U refresh initiated"},
|
||||
)
|
||||
def post(self, request, format=None):
|
||||
refresh_m3u_accounts.delay()
|
||||
return Response({'success': True, 'message': 'M3U refresh initiated.'}, status=status.HTTP_202_ACCEPTED)
|
||||
return Response(
|
||||
{"success": True, "message": "M3U refresh initiated."},
|
||||
status=status.HTTP_202_ACCEPTED,
|
||||
)
|
||||
|
||||
|
||||
class RefreshSingleM3UAPIView(APIView):
|
||||
"""Triggers refresh for a single M3U account"""
|
||||
|
||||
def get_permissions(self):
|
||||
try:
|
||||
return [
|
||||
perm() for perm in permission_classes_by_method[self.request.method]
|
||||
]
|
||||
except KeyError:
|
||||
return [Authenticated()]
|
||||
|
||||
@swagger_auto_schema(
|
||||
operation_description="Triggers a refresh of a single M3U account",
|
||||
responses={202: "M3U account refresh initiated"}
|
||||
responses={202: "M3U account refresh initiated"},
|
||||
)
|
||||
def post(self, request, account_id, format=None):
|
||||
refresh_single_m3u_account.delay(account_id)
|
||||
return Response({'success': True, 'message': f'M3U account {account_id} refresh initiated.'},
|
||||
status=status.HTTP_202_ACCEPTED)
|
||||
return Response(
|
||||
{
|
||||
"success": True,
|
||||
"message": f"M3U account {account_id} refresh initiated.",
|
||||
},
|
||||
status=status.HTTP_202_ACCEPTED,
|
||||
)
|
||||
|
||||
|
||||
class RefreshAccountInfoAPIView(APIView):
|
||||
"""Triggers account info refresh for a single M3U account"""
|
||||
|
||||
def get_permissions(self):
|
||||
try:
|
||||
return [
|
||||
perm() for perm in permission_classes_by_method[self.request.method]
|
||||
]
|
||||
except KeyError:
|
||||
return [Authenticated()]
|
||||
|
||||
@swagger_auto_schema(
|
||||
operation_description="Triggers a refresh of account information for a specific M3U profile",
|
||||
responses={202: "Account info refresh initiated", 400: "Profile not found or not XtreamCodes"},
|
||||
)
|
||||
def post(self, request, profile_id, format=None):
|
||||
try:
|
||||
from .models import M3UAccountProfile
|
||||
profile = M3UAccountProfile.objects.get(id=profile_id)
|
||||
account = profile.m3u_account
|
||||
|
||||
if account.account_type != M3UAccount.Types.XC:
|
||||
return Response(
|
||||
{
|
||||
"success": False,
|
||||
"error": "Account info refresh is only available for XtreamCodes accounts",
|
||||
},
|
||||
status=status.HTTP_400_BAD_REQUEST,
|
||||
)
|
||||
|
||||
refresh_account_info.delay(profile_id)
|
||||
return Response(
|
||||
{
|
||||
"success": True,
|
||||
"message": f"Account info refresh initiated for profile {profile.name}.",
|
||||
},
|
||||
status=status.HTTP_202_ACCEPTED,
|
||||
)
|
||||
except M3UAccountProfile.DoesNotExist:
|
||||
return Response(
|
||||
{
|
||||
"success": False,
|
||||
"error": "Profile not found",
|
||||
},
|
||||
status=status.HTTP_404_NOT_FOUND,
|
||||
)
|
||||
|
||||
|
||||
class UserAgentViewSet(viewsets.ModelViewSet):
|
||||
"""Handles CRUD operations for User Agents"""
|
||||
|
||||
queryset = UserAgent.objects.all()
|
||||
serializer_class = UserAgentSerializer
|
||||
permission_classes = [IsAuthenticated]
|
||||
|
||||
def get_permissions(self):
|
||||
try:
|
||||
return [perm() for perm in permission_classes_by_action[self.action]]
|
||||
except KeyError:
|
||||
return [Authenticated()]
|
||||
|
||||
|
||||
class M3UAccountProfileViewSet(viewsets.ModelViewSet):
|
||||
queryset = M3UAccountProfile.objects.all()
|
||||
serializer_class = M3UAccountProfileSerializer
|
||||
permission_classes = [IsAuthenticated]
|
||||
|
||||
def get_permissions(self):
|
||||
try:
|
||||
return [perm() for perm in permission_classes_by_action[self.action]]
|
||||
except KeyError:
|
||||
return [Authenticated()]
|
||||
|
||||
def get_queryset(self):
|
||||
m3u_account_id = self.kwargs['account_id']
|
||||
m3u_account_id = self.kwargs["account_id"]
|
||||
return M3UAccountProfile.objects.filter(m3u_account_id=m3u_account_id)
|
||||
|
||||
def perform_create(self, serializer):
|
||||
# Get the account ID from the URL
|
||||
account_id = self.kwargs['account_id']
|
||||
account_id = self.kwargs["account_id"]
|
||||
|
||||
# Get the M3UAccount instance for the account_id
|
||||
m3u_account = M3UAccount.objects.get(id=account_id)
|
||||
|
||||
# Save the 'm3u_account' in the serializer context
|
||||
serializer.context['m3u_account'] = m3u_account
|
||||
serializer.context["m3u_account"] = m3u_account
|
||||
|
||||
# Perform the actual save
|
||||
serializer.save(m3u_account_id=m3u_account)
|
||||
|
|
|
|||
|
|
@ -4,6 +4,13 @@ from .models import M3UAccount, M3UFilter
|
|||
import re
|
||||
|
||||
class M3UAccountForm(forms.ModelForm):
|
||||
enable_vod = forms.BooleanField(
|
||||
required=False,
|
||||
initial=False,
|
||||
label="Enable VOD Content",
|
||||
help_text="Parse and import VOD (movies/series) content for XtreamCodes accounts"
|
||||
)
|
||||
|
||||
class Meta:
|
||||
model = M3UAccount
|
||||
fields = [
|
||||
|
|
@ -13,8 +20,34 @@ class M3UAccountForm(forms.ModelForm):
|
|||
'server_group',
|
||||
'max_streams',
|
||||
'is_active',
|
||||
'enable_vod',
|
||||
]
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
super().__init__(*args, **kwargs)
|
||||
|
||||
# Set initial value for enable_vod from custom_properties
|
||||
if self.instance and self.instance.custom_properties:
|
||||
custom_props = self.instance.custom_properties or {}
|
||||
self.fields['enable_vod'].initial = custom_props.get('enable_vod', False)
|
||||
|
||||
def save(self, commit=True):
|
||||
instance = super().save(commit=False)
|
||||
|
||||
# Handle enable_vod field
|
||||
enable_vod = self.cleaned_data.get('enable_vod', False)
|
||||
|
||||
# Parse existing custom_properties
|
||||
custom_props = instance.custom_properties or {}
|
||||
|
||||
# Update VOD preference
|
||||
custom_props['enable_vod'] = enable_vod
|
||||
instance.custom_properties = custom_props
|
||||
|
||||
if commit:
|
||||
instance.save()
|
||||
return instance
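save() keeps the VOD switch inside the account's custom_properties JSON rather than a dedicated column, which is why the admin display and the API view both read enable_vod back out of that dict. A plain-Python sketch of the round trip (values invented):

# What the form does on save:
custom_props = {}                        # instance.custom_properties or {}
custom_props["enable_vod"] = True        # self.cleaned_data.get("enable_vod", False)

# What vod_enabled_display() and the update() API view do when reading it back:
assert custom_props.get("enable_vod", False) is True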
|
||||
|
||||
def clean_uploaded_file(self):
|
||||
uploaded_file = self.cleaned_data.get('uploaded_file')
|
||||
if uploaded_file:
|
||||
|
|
|
|||
|
|
@ -3,6 +3,7 @@
|
|||
from django.db import migrations
|
||||
from core.models import CoreSettings
|
||||
|
||||
|
||||
def create_custom_account(apps, schema_editor):
|
||||
default_user_agent_id = CoreSettings.get_default_user_agent_id()
|
||||
|
||||
|
|
@ -18,7 +19,7 @@ def create_custom_account(apps, schema_editor):
|
|||
M3UAccountProfile = apps.get_model("m3u", "M3UAccountProfile")
|
||||
M3UAccountProfile.objects.create(
|
||||
m3u_account=m3u_account,
|
||||
name=f'{m3u_account.name} Default',
|
||||
name=f"{m3u_account.name} Default",
|
||||
max_streams=m3u_account.max_streams,
|
||||
is_default=True,
|
||||
is_active=True,
|
||||
|
|
@ -26,10 +27,12 @@ def create_custom_account(apps, schema_editor):
|
|||
replace_pattern="$1",
|
||||
)
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('m3u', '0002_m3uaccount_locked'),
|
||||
("m3u", "0002_m3uaccount_locked"),
|
||||
("core", "0004_preload_core_settings"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
|
|
|
|||
|
|
@ -7,24 +7,29 @@ from django.db import migrations, models
|
|||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('django_celery_beat', '0019_alter_periodictasks_options'),
|
||||
('m3u', '0004_m3uaccount_stream_profile'),
|
||||
("django_celery_beat", "0019_alter_periodictasks_options"),
|
||||
("m3u", "0004_m3uaccount_stream_profile"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name='m3uaccount',
|
||||
name='custom_properties',
|
||||
model_name="m3uaccount",
|
||||
name="custom_properties",
|
||||
field=models.TextField(blank=True, null=True),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='m3uaccount',
|
||||
name='refresh_interval',
|
||||
model_name="m3uaccount",
|
||||
name="refresh_interval",
|
||||
field=models.IntegerField(default=24),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='m3uaccount',
|
||||
name='refresh_task',
|
||||
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to='django_celery_beat.periodictask'),
|
||||
model_name="m3uaccount",
|
||||
name="refresh_task",
|
||||
field=models.ForeignKey(
|
||||
blank=True,
|
||||
null=True,
|
||||
on_delete=django.db.models.deletion.SET_NULL,
|
||||
to="django_celery_beat.periodictask",
|
||||
),
|
||||
),
|
||||
]
|
||||
|
|
|
|||
apps/m3u/migrations/0008_m3uaccount_stale_stream_days.py (new file, 18 lines)
|
|
@@ -0,0 +1,18 @@
+# Generated by Django 5.1.6
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ('m3u', '0007_remove_m3uaccount_uploaded_file_m3uaccount_file_path'),
+    ]
+
+    operations = [
+        migrations.AddField(
+            model_name='m3uaccount',
+            name='stale_stream_days',
+            field=models.PositiveIntegerField(default=7, help_text='Number of days after which a stream will be removed if not seen in the M3U source.'),
+        ),
+    ]
|
||||
|
|
@ -0,0 +1,28 @@
|
|||
# Generated by Django 5.1.6 on 2025-04-27 12:56
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('m3u', '0008_m3uaccount_stale_stream_days'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name='m3uaccount',
|
||||
name='account_type',
|
||||
field=models.CharField(choices=[('STD', 'Standard'), ('XC', 'Xtream Codes')], default='STD'),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='m3uaccount',
|
||||
name='password',
|
||||
field=models.CharField(blank=True, max_length=255, null=True),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='m3uaccount',
|
||||
name='username',
|
||||
field=models.CharField(blank=True, max_length=255, null=True),
|
||||
),
|
||||
]
|
||||
|
|
@ -0,0 +1,28 @@
|
|||
# Generated by Django 5.1.6 on 2025-05-04 21:43
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('m3u', '0009_m3uaccount_account_type_m3uaccount_password_and_more'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name='m3uaccount',
|
||||
name='last_message',
|
||||
field=models.TextField(blank=True, null=True, help_text="Last status message, including success results or error information"),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='m3uaccount',
|
||||
name='status',
|
||||
field=models.CharField(choices=[('idle', 'Idle'), ('fetching', 'Fetching'), ('parsing', 'Parsing'), ('error', 'Error'), ('success', 'Success')], default='idle', max_length=20),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='m3uaccount',
|
||||
name='updated_at',
|
||||
field=models.DateTimeField(blank=True, help_text='Time when this account was last successfully refreshed', null=True),
|
||||
),
|
||||
]
|
||||
apps/m3u/migrations/0011_alter_m3uaccount_status.py (new file, 18 lines)
|
|
@ -0,0 +1,18 @@
|
|||
# Generated by Django 5.1.6 on 2025-05-15 01:05
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('m3u', '0010_add_status_fields_and_remove_auto_now'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterField(
|
||||
model_name='m3uaccount',
|
||||
name='status',
|
||||
field=models.CharField(choices=[('idle', 'Idle'), ('fetching', 'Fetching'), ('parsing', 'Parsing'), ('error', 'Error'), ('success', 'Success'), ('pending_setup', 'Pending Setup'), ('disabled', 'Disabled')], default='idle', max_length=20),
|
||||
),
|
||||
]
|
||||
|
|
@ -0,0 +1,18 @@
|
|||
# Generated by Django 5.1.6 on 2025-05-21 19:58
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('m3u', '0011_alter_m3uaccount_status'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterField(
|
||||
model_name='m3uaccount',
|
||||
name='refresh_interval',
|
||||
field=models.IntegerField(default=0),
|
||||
),
|
||||
]
|
||||
apps/m3u/migrations/0013_alter_m3ufilter_filter_type.py (new file, 18 lines)
|
|
@ -0,0 +1,18 @@
|
|||
# Generated by Django 5.1.6 on 2025-07-22 21:16
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('m3u', '0012_alter_m3uaccount_refresh_interval'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterField(
|
||||
model_name='m3ufilter',
|
||||
name='filter_type',
|
||||
field=models.CharField(choices=[('group', 'Group'), ('name', 'Stream Name'), ('url', 'Stream URL')], default='group', help_text='Filter based on either group title or stream name.', max_length=50),
|
||||
),
|
||||
]
|
||||
|
|
@ -0,0 +1,22 @@
|
|||
# Generated by Django 5.1.6 on 2025-07-31 17:14
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('m3u', '0013_alter_m3ufilter_filter_type'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterModelOptions(
|
||||
name='m3ufilter',
|
||||
options={'ordering': ['order']},
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='m3ufilter',
|
||||
name='order',
|
||||
field=models.PositiveIntegerField(default=0),
|
||||
),
|
||||
]
|
||||
|
|
@ -0,0 +1,22 @@
|
|||
# Generated by Django 5.2.4 on 2025-08-02 16:06
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('m3u', '0014_alter_m3ufilter_options_m3ufilter_order'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterModelOptions(
|
||||
name='m3ufilter',
|
||||
options={},
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='m3ufilter',
|
||||
name='custom_properties',
|
||||
field=models.TextField(blank=True, null=True),
|
||||
),
|
||||
]
|
||||
apps/m3u/migrations/0016_m3uaccount_priority.py (new file, 18 lines)
|
|
@ -0,0 +1,18 @@
|
|||
# Generated by Django 5.2.4 on 2025-08-20 22:35
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('m3u', '0015_alter_m3ufilter_options_m3ufilter_custom_properties'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name='m3uaccount',
|
||||
name='priority',
|
||||
field=models.PositiveIntegerField(default=0, help_text='Priority for VOD provider selection (higher numbers = higher priority). Used when multiple providers offer the same content.'),
|
||||
),
|
||||
]
|
||||
|
|
@ -0,0 +1,28 @@
|
|||
# Generated by Django 5.2.4 on 2025-09-02 15:19
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('m3u', '0016_m3uaccount_priority'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterField(
|
||||
model_name='m3uaccount',
|
||||
name='custom_properties',
|
||||
field=models.JSONField(blank=True, default=dict, null=True),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='m3uaccount',
|
||||
name='server_url',
|
||||
field=models.URLField(blank=True, help_text='The base URL of the M3U server (optional if a file is uploaded)', max_length=1000, null=True),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='m3ufilter',
|
||||
name='custom_properties',
|
||||
field=models.JSONField(blank=True, default=dict, null=True),
|
||||
),
|
||||
]
|
||||
apps/m3u/migrations/0018_add_profile_custom_properties.py (new file, 18 lines)
|
|
@ -0,0 +1,18 @@
|
|||
# Generated by Django 5.2.4 on 2025-09-09 20:57
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('m3u', '0017_alter_m3uaccount_custom_properties_and_more'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name='m3uaccountprofile',
|
||||
name='custom_properties',
|
||||
field=models.JSONField(blank=True, default=dict, help_text='Custom properties for storing account information from provider (e.g., XC account details, expiration dates)', null=True),
|
||||
),
|
||||
]
|
||||
|
|
@ -7,73 +7,98 @@ from apps.channels.models import StreamProfile
|
|||
from django_celery_beat.models import PeriodicTask
|
||||
from core.models import CoreSettings, UserAgent
|
||||
|
||||
CUSTOM_M3U_ACCOUNT_NAME="custom"
|
||||
CUSTOM_M3U_ACCOUNT_NAME = "custom"
|
||||
|
||||
|
||||
class M3UAccount(models.Model):
|
||||
class Types(models.TextChoices):
|
||||
STADNARD = "STD", "Standard"
|
||||
XC = "XC", "Xtream Codes"
|
||||
|
||||
class Status(models.TextChoices):
|
||||
IDLE = "idle", "Idle"
|
||||
FETCHING = "fetching", "Fetching"
|
||||
PARSING = "parsing", "Parsing"
|
||||
ERROR = "error", "Error"
|
||||
SUCCESS = "success", "Success"
|
||||
PENDING_SETUP = "pending_setup", "Pending Setup"
|
||||
DISABLED = "disabled", "Disabled"
|
||||
|
||||
"""Represents an M3U Account for IPTV streams."""
|
||||
name = models.CharField(
|
||||
max_length=255,
|
||||
unique=True,
|
||||
help_text="Unique name for this M3U account"
|
||||
max_length=255, unique=True, help_text="Unique name for this M3U account"
|
||||
)
|
||||
server_url = models.URLField(
|
||||
max_length=1000,
|
||||
blank=True,
|
||||
null=True,
|
||||
help_text="The base URL of the M3U server (optional if a file is uploaded)"
|
||||
)
|
||||
file_path = models.CharField(
|
||||
max_length=255,
|
||||
blank=True,
|
||||
null=True
|
||||
help_text="The base URL of the M3U server (optional if a file is uploaded)",
|
||||
)
|
||||
file_path = models.CharField(max_length=255, blank=True, null=True)
|
||||
server_group = models.ForeignKey(
|
||||
'ServerGroup',
|
||||
"ServerGroup",
|
||||
on_delete=models.SET_NULL,
|
||||
null=True,
|
||||
blank=True,
|
||||
related_name='m3u_accounts',
|
||||
help_text="The server group this M3U account belongs to"
|
||||
related_name="m3u_accounts",
|
||||
help_text="The server group this M3U account belongs to",
|
||||
)
|
||||
max_streams = models.PositiveIntegerField(
|
||||
default=0,
|
||||
help_text="Maximum number of concurrent streams (0 for unlimited)"
|
||||
default=0, help_text="Maximum number of concurrent streams (0 for unlimited)"
|
||||
)
|
||||
is_active = models.BooleanField(
|
||||
default=True,
|
||||
help_text="Set to false to deactivate this M3U account"
|
||||
default=True, help_text="Set to false to deactivate this M3U account"
|
||||
)
|
||||
created_at = models.DateTimeField(
|
||||
auto_now_add=True,
|
||||
help_text="Time when this account was created"
|
||||
auto_now_add=True, help_text="Time when this account was created"
|
||||
)
|
||||
updated_at = models.DateTimeField(
|
||||
auto_now=True,
|
||||
help_text="Time when this account was last updated"
|
||||
null=True,
|
||||
blank=True,
|
||||
help_text="Time when this account was last successfully refreshed",
|
||||
)
|
||||
status = models.CharField(
|
||||
max_length=20, choices=Status.choices, default=Status.IDLE
|
||||
)
|
||||
last_message = models.TextField(
|
||||
null=True,
|
||||
blank=True,
|
||||
help_text="Last status message, including success results or error information",
|
||||
)
|
||||
user_agent = models.ForeignKey(
|
||||
'core.UserAgent',
|
||||
"core.UserAgent",
|
||||
on_delete=models.SET_NULL,
|
||||
null=True,
|
||||
blank=True,
|
||||
related_name='m3u_accounts',
|
||||
help_text="The User-Agent associated with this M3U account."
|
||||
related_name="m3u_accounts",
|
||||
help_text="The User-Agent associated with this M3U account.",
|
||||
)
|
||||
locked = models.BooleanField(
|
||||
default=False,
|
||||
help_text="Protected - can't be deleted or modified"
|
||||
default=False, help_text="Protected - can't be deleted or modified"
|
||||
)
|
||||
stream_profile = models.ForeignKey(
|
||||
StreamProfile,
|
||||
on_delete=models.SET_NULL,
|
||||
null=True,
|
||||
blank=True,
|
||||
related_name='m3u_accounts'
|
||||
related_name="m3u_accounts",
|
||||
)
|
||||
custom_properties = models.TextField(null=True, blank=True)
|
||||
refresh_interval = models.IntegerField(default=24)
|
||||
account_type = models.CharField(choices=Types.choices, default=Types.STADNARD)
|
||||
username = models.CharField(max_length=255, null=True, blank=True)
|
||||
password = models.CharField(max_length=255, null=True, blank=True)
|
||||
custom_properties = models.JSONField(default=dict, blank=True, null=True)
|
||||
refresh_interval = models.IntegerField(default=0)
|
||||
refresh_task = models.ForeignKey(
|
||||
PeriodicTask, on_delete=models.SET_NULL, null=True, blank=True
|
||||
)
|
||||
stale_stream_days = models.PositiveIntegerField(
|
||||
default=7,
|
||||
help_text="Number of days after which a stream will be removed if not seen in the M3U source.",
|
||||
)
|
||||
priority = models.PositiveIntegerField(
|
||||
default=0,
|
||||
help_text="Priority for VOD provider selection (higher numbers = higher priority). Used when multiple providers offer the same content.",
|
||||
)
|
||||
|
||||
def __str__(self):
|
||||
return self.name
|
||||
|
|
@ -104,10 +129,21 @@ class M3UAccount(models.Model):
|
|||
def get_user_agent(self):
|
||||
user_agent = self.user_agent
|
||||
if not user_agent:
|
||||
user_agent = UserAgent.objects.get(id=CoreSettings.get_default_user_agent_id())
|
||||
user_agent = UserAgent.objects.get(
|
||||
id=CoreSettings.get_default_user_agent_id()
|
||||
)
|
||||
|
||||
return user_agent
|
||||
|
||||
def save(self, *args, **kwargs):
|
||||
# Prevent auto_now behavior by handling updated_at manually
|
||||
if "update_fields" in kwargs and "updated_at" not in kwargs["update_fields"]:
|
||||
# Don't modify updated_at for regular updates
|
||||
kwargs.setdefault("update_fields", [])
|
||||
if "updated_at" in kwargs["update_fields"]:
|
||||
kwargs["update_fields"].remove("updated_at")
|
||||
super().save(*args, **kwargs)
|
||||
|
||||
# def get_channel_groups(self):
|
||||
# return ChannelGroup.objects.filter(m3u_account__m3u_account=self)
|
||||
|
||||
|
|
@ -119,35 +155,40 @@ class M3UAccount(models.Model):
|
|||
# """Return all streams linked to this account with enabled ChannelGroups."""
|
||||
# return self.streams.filter(channel_group__in=ChannelGroup.objects.filter(m3u_account__enabled=True))
|
||||
|
||||
|
||||
class M3UFilter(models.Model):
|
||||
"""Defines filters for M3U accounts based on stream name or group title."""
|
||||
|
||||
FILTER_TYPE_CHOICES = (
|
||||
('group', 'Group Title'),
|
||||
('name', 'Stream Name'),
|
||||
("group", "Group"),
|
||||
("name", "Stream Name"),
|
||||
("url", "Stream URL"),
|
||||
)
|
||||
|
||||
m3u_account = models.ForeignKey(
|
||||
M3UAccount,
|
||||
on_delete=models.CASCADE,
|
||||
related_name='filters',
|
||||
help_text="The M3U account this filter is applied to."
|
||||
related_name="filters",
|
||||
help_text="The M3U account this filter is applied to.",
|
||||
)
|
||||
filter_type = models.CharField(
|
||||
max_length=50,
|
||||
choices=FILTER_TYPE_CHOICES,
|
||||
default='group',
|
||||
help_text="Filter based on either group title or stream name."
|
||||
default="group",
|
||||
help_text="Filter based on either group title or stream name.",
|
||||
)
|
||||
regex_pattern = models.CharField(
|
||||
max_length=200,
|
||||
help_text="A regex pattern to match streams or groups."
|
||||
max_length=200, help_text="A regex pattern to match streams or groups."
|
||||
)
|
||||
exclude = models.BooleanField(
|
||||
default=True,
|
||||
help_text="If True, matching items are excluded; if False, only matches are included."
|
||||
help_text="If True, matching items are excluded; if False, only matches are included.",
|
||||
)
|
||||
order = models.PositiveIntegerField(default=0)
|
||||
custom_properties = models.JSONField(default=dict, blank=True, null=True)
|
||||
|
||||
def applies_to(self, stream_name, group_name):
|
||||
target = group_name if self.filter_type == 'group' else stream_name
|
||||
target = group_name if self.filter_type == "group" else stream_name
|
||||
return bool(re.search(self.regex_pattern, target, re.IGNORECASE))
|
||||
|
||||
def clean(self):
|
||||
|
|
@ -157,7 +198,9 @@ class M3UFilter(models.Model):
|
|||
raise ValidationError(f"Invalid regex pattern: {self.regex_pattern}")
|
||||
|
||||
def __str__(self):
|
||||
filter_type_display = dict(self.FILTER_TYPE_CHOICES).get(self.filter_type, 'Unknown')
|
||||
filter_type_display = dict(self.FILTER_TYPE_CHOICES).get(
|
||||
self.filter_type, "Unknown"
|
||||
)
|
||||
exclude_status = "Exclude" if self.exclude else "Include"
|
||||
return f"[{self.m3u_account.name}] {filter_type_display}: {self.regex_pattern} ({exclude_status})"
|
||||
|
||||
|
|
@ -183,40 +226,35 @@ class M3UFilter(models.Model):
|
|||
|
||||
class ServerGroup(models.Model):
|
||||
"""Represents a logical grouping of servers or channels."""
|
||||
|
||||
name = models.CharField(
|
||||
max_length=100,
|
||||
unique=True,
|
||||
help_text="Unique name for this server group."
|
||||
max_length=100, unique=True, help_text="Unique name for this server group."
|
||||
)
|
||||
|
||||
def __str__(self):
|
||||
return self.name
|
||||
|
||||
from django.db import models
|
||||
|
||||
class M3UAccountProfile(models.Model):
|
||||
"""Represents a profile associated with an M3U Account."""
|
||||
|
||||
m3u_account = models.ForeignKey(
|
||||
'M3UAccount',
|
||||
"M3UAccount",
|
||||
on_delete=models.CASCADE,
|
||||
related_name='profiles',
|
||||
help_text="The M3U account this profile belongs to."
|
||||
related_name="profiles",
|
||||
help_text="The M3U account this profile belongs to.",
|
||||
)
|
||||
name = models.CharField(
|
||||
max_length=255,
|
||||
help_text="Name for the M3U account profile"
|
||||
max_length=255, help_text="Name for the M3U account profile"
|
||||
)
|
||||
is_default = models.BooleanField(
|
||||
default=False,
|
||||
help_text="Set to false to deactivate this profile"
|
||||
default=False, help_text="Set to false to deactivate this profile"
|
||||
)
|
||||
max_streams = models.PositiveIntegerField(
|
||||
default=0,
|
||||
help_text="Maximum number of concurrent streams (0 for unlimited)"
|
||||
default=0, help_text="Maximum number of concurrent streams (0 for unlimited)"
|
||||
)
|
||||
is_active = models.BooleanField(
|
||||
default=True,
|
||||
help_text="Set to false to deactivate this profile"
|
||||
default=True, help_text="Set to false to deactivate this profile"
|
||||
)
|
||||
search_pattern = models.CharField(
|
||||
max_length=255,
|
||||
|
|
@ -225,22 +263,95 @@ class M3UAccountProfile(models.Model):
|
|||
max_length=255,
|
||||
)
|
||||
current_viewers = models.PositiveIntegerField(default=0)
|
||||
custom_properties = models.JSONField(
|
||||
default=dict,
|
||||
blank=True,
|
||||
null=True,
|
||||
help_text="Custom properties for storing account information from provider (e.g., XC account details, expiration dates)"
|
||||
)
|
||||
|
||||
class Meta:
|
||||
constraints = [
|
||||
models.UniqueConstraint(fields=['m3u_account', 'name'], name='unique_account_name')
|
||||
models.UniqueConstraint(
|
||||
fields=["m3u_account", "name"], name="unique_account_name"
|
||||
)
|
||||
]
|
||||
|
||||
def __str__(self):
|
||||
return f"{self.name} ({self.m3u_account.name})"
|
||||
|
||||
def get_account_expiration(self):
|
||||
"""Get account expiration date from custom properties if available"""
|
||||
if not self.custom_properties:
|
||||
return None
|
||||
|
||||
user_info = self.custom_properties.get('user_info', {})
|
||||
exp_date = user_info.get('exp_date')
|
||||
|
||||
if exp_date:
|
||||
try:
|
||||
from datetime import datetime
|
||||
# XC exp_date is typically a Unix timestamp
|
||||
if isinstance(exp_date, (int, float)):
|
||||
return datetime.fromtimestamp(exp_date)
|
||||
elif isinstance(exp_date, str):
|
||||
# Try to parse as timestamp first, then as ISO date
|
||||
try:
|
||||
return datetime.fromtimestamp(float(exp_date))
|
||||
except ValueError:
|
||||
return datetime.fromisoformat(exp_date)
|
||||
except (ValueError, TypeError):
|
||||
pass
|
||||
|
||||
return None
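get_account_expiration() tolerates three shapes of exp_date: a numeric Unix timestamp, the same timestamp as a string, or an ISO-8601 string. A standalone mirror of that fallback order, with invented values:

from datetime import datetime

def parse_exp_date(exp_date):
    """Same fallback order as the method above; returns None when unparseable."""
    try:
        if isinstance(exp_date, (int, float)):
            return datetime.fromtimestamp(exp_date)
        if isinstance(exp_date, str):
            try:
                return datetime.fromtimestamp(float(exp_date))
            except ValueError:
                return datetime.fromisoformat(exp_date)
    except (ValueError, TypeError):
        pass
    return None

print(parse_exp_date(1767225600))          # Unix timestamp
print(parse_exp_date("1767225600"))        # timestamp as a string
print(parse_exp_date("2026-01-01T00:00"))  # ISO-8601 string
print(parse_exp_date("not-a-date"))        # None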
|
||||
|
||||
def get_account_status(self):
|
||||
"""Get account status from custom properties if available"""
|
||||
if not self.custom_properties:
|
||||
return None
|
||||
|
||||
user_info = self.custom_properties.get('user_info', {})
|
||||
return user_info.get('status')
|
||||
|
||||
def get_max_connections(self):
|
||||
"""Get maximum connections from custom properties if available"""
|
||||
if not self.custom_properties:
|
||||
return None
|
||||
|
||||
user_info = self.custom_properties.get('user_info', {})
|
||||
return user_info.get('max_connections')
|
||||
|
||||
def get_active_connections(self):
|
||||
"""Get active connections from custom properties if available"""
|
||||
if not self.custom_properties:
|
||||
return None
|
||||
|
||||
user_info = self.custom_properties.get('user_info', {})
|
||||
return user_info.get('active_cons')
|
||||
|
||||
def get_last_refresh(self):
|
||||
"""Get last refresh timestamp from custom properties if available"""
|
||||
if not self.custom_properties:
|
||||
return None
|
||||
|
||||
last_refresh = self.custom_properties.get('last_refresh')
|
||||
if last_refresh:
|
||||
try:
|
||||
from datetime import datetime
|
||||
return datetime.fromisoformat(last_refresh)
|
||||
except (ValueError, TypeError):
|
||||
pass
|
||||
|
||||
return None
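

# Illustrative sketch of how the helpers above are meant to be used, assuming the
# provider refresh stores an XC-style payload in custom_properties. The key names
# mirror the lookups in the methods above; the exact schema comes from the provider
# and is an assumption here.
def _example_read_xc_account_info(profile):
    profile.custom_properties = {
        "user_info": {
            "exp_date": 1735689600,  # Unix timestamp from the provider
            "status": "Active",
            "max_connections": 2,
            "active_cons": 0,
        },
        "last_refresh": "2025-01-01T00:00:00",
    }
    return (
        profile.get_account_expiration(),  # datetime for the exp_date timestamp
        profile.get_account_status(),      # "Active"
        profile.get_max_connections(),     # 2
        profile.get_active_connections(),  # 0
        profile.get_last_refresh(),        # datetime(2025, 1, 1, 0, 0)
    )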


@receiver(models.signals.post_save, sender=M3UAccount)
def create_profile_for_m3u_account(sender, instance, created, **kwargs):
    """Automatically create an M3UAccountProfile when M3UAccount is created."""
    if created:
        M3UAccountProfile.objects.create(
            m3u_account=instance,
            name=f"{instance.name} Default",
            max_streams=instance.max_streams,
            is_default=True,
            is_active=True,

@@ -253,6 +364,5 @@ def create_profile_for_m3u_account(sender, instance, created, **kwargs):
            is_default=True,
        )

        profile.max_streams = instance.max_streams
        profile.save()
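

# Sketch of the behaviour wired up by create_profile_for_m3u_account above: saving a
# brand-new account should leave a matching default profile behind. The field names
# used here (name, max_streams) appear in this diff, but the full M3UAccount
# constructor is not shown, so treat the create() call as an assumption.
def _example_default_profile_autocreation():
    account = M3UAccount.objects.create(name="Provider A", max_streams=2)
    profile = account.profiles.get(is_default=True)  # related_name="profiles"
    assert profile.name == "Provider A Default"
    assert profile.max_streams == 2
    return profile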


@@ -1,41 +1,106 @@
from core.utils import validate_flexible_url
from rest_framework import serializers, status
from rest_framework.response import Response
from .models import M3UAccount, M3UFilter, ServerGroup, M3UAccountProfile
from core.models import UserAgent
from apps.channels.models import ChannelGroup, ChannelGroupM3UAccount
from apps.channels.serializers import (
    ChannelGroupM3UAccountSerializer,
)
import logging
import json

logger = logging.getLogger(__name__)


class M3UFilterSerializer(serializers.ModelSerializer):
    """Serializer for M3U Filters"""

    class Meta:
        model = M3UFilter
        fields = [
            "id",
            "filter_type",
            "regex_pattern",
            "exclude",
            "order",
            "custom_properties",
        ]


from rest_framework import serializers
from .models import M3UAccountProfile


class M3UAccountProfileSerializer(serializers.ModelSerializer):
    account = serializers.SerializerMethodField()

    def get_account(self, obj):
        """Include basic account information for frontend use"""
        return {
            'id': obj.m3u_account.id,
            'name': obj.m3u_account.name,
            'account_type': obj.m3u_account.account_type,
            'is_xtream_codes': obj.m3u_account.account_type == 'XC'
        }

    class Meta:
        model = M3UAccountProfile
        fields = [
            "id",
            "name",
            "max_streams",
            "is_active",
            "is_default",
            "current_viewers",
            "search_pattern",
            "replace_pattern",
            "custom_properties",
            "account",
        ]
        read_only_fields = ["id", "account"]
        extra_kwargs = {
            'search_pattern': {'required': False, 'allow_blank': True},
            'replace_pattern': {'required': False, 'allow_blank': True},
        }

    def create(self, validated_data):
        m3u_account = self.context.get("m3u_account")

        # Use the m3u_account when creating the profile
        validated_data["m3u_account_id"] = m3u_account.id

        return super().create(validated_data)

    def validate(self, data):
        """Custom validation to handle default profiles"""
        # For updates to existing instances
        if self.instance and self.instance.is_default:
            # For default profiles, search_pattern and replace_pattern are not required
            # and we don't want to validate them since they shouldn't be changed
            return data

        # For non-default profiles or new profiles, ensure required fields are present
        if not data.get('search_pattern'):
            raise serializers.ValidationError({
                'search_pattern': ['This field is required for non-default profiles.']
            })
        if not data.get('replace_pattern'):
            raise serializers.ValidationError({
                'replace_pattern': ['This field is required for non-default profiles.']
            })

        return data

    def update(self, instance, validated_data):
        if instance.is_default:
            # For default profiles, only allow updating name and custom_properties (for notes)
            allowed_fields = {'name', 'custom_properties'}

            # Remove any fields that aren't allowed for default profiles
            disallowed_fields = set(validated_data.keys()) - allowed_fields
            if disallowed_fields:
                raise serializers.ValidationError(
                    f"Default profiles can only modify name and notes. "
                    f"Cannot modify: {', '.join(disallowed_fields)}"
                )

        return super().update(instance, validated_data)

    def destroy(self, request, *args, **kwargs):

@@ -43,13 +108,15 @@ class M3UAccountProfileSerializer(serializers.ModelSerializer):
        if instance.is_default:
            return Response(
                {"error": "Default profiles cannot be deleted."},
                status=status.HTTP_400_BAD_REQUEST,
            )
        return super().destroy(request, *args, **kwargs)
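

# Sketch of driving M3UAccountProfileSerializer from a view, assuming the caller
# supplies the parent account via the "m3u_account" context key that create() reads
# above. The field values are illustrative only.
def _example_create_profile(account):
    serializer = M3UAccountProfileSerializer(
        data={
            "name": "Backup",
            "max_streams": 1,
            "search_pattern": "live/",
            "replace_pattern": "backup/",
        },
        context={"m3u_account": account},
    )
    # validate() enforces search/replace patterns for non-default profiles.
    serializer.is_valid(raise_exception=True)
    # create() injects m3u_account_id from the context before saving.
    return serializer.save()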


class M3UAccountSerializer(serializers.ModelSerializer):
    """Serializer for M3U Account"""

    filters = serializers.SerializerMethodField()
    # Include user_agent as a mandatory field using its primary key.
    user_agent = serializers.PrimaryKeyRelatedField(
        queryset=UserAgent.objects.all(),

@@ -57,21 +124,96 @@ class M3UAccountSerializer(serializers.ModelSerializer):
        allow_null=True,
    )
    profiles = M3UAccountProfileSerializer(many=True, read_only=True)
    read_only_fields = ["locked", "created_at", "updated_at"]
    # channel_groups = serializers.SerializerMethodField()
    channel_groups = ChannelGroupM3UAccountSerializer(
        source="channel_group", many=True, required=False
    )
    server_url = serializers.CharField(
        required=False,
        allow_blank=True,
        allow_null=True,
        validators=[validate_flexible_url],
    )
    enable_vod = serializers.BooleanField(required=False, write_only=True)
    auto_enable_new_groups_live = serializers.BooleanField(required=False, write_only=True)
    auto_enable_new_groups_vod = serializers.BooleanField(required=False, write_only=True)
    auto_enable_new_groups_series = serializers.BooleanField(required=False, write_only=True)

    class Meta:
        model = M3UAccount
        fields = [
            "id",
            "name",
            "server_url",
            "file_path",
            "server_group",
            "max_streams",
            "is_active",
            "created_at",
            "updated_at",
            "filters",
            "user_agent",
            "profiles",
            "locked",
            "channel_groups",
            "refresh_interval",
            "custom_properties",
            "account_type",
            "username",
            "password",
            "stale_stream_days",
            "priority",
            "status",
            "last_message",
            "enable_vod",
            "auto_enable_new_groups_live",
            "auto_enable_new_groups_vod",
            "auto_enable_new_groups_series",
        ]
        extra_kwargs = {
            "password": {
                "required": False,
                "allow_blank": True,
            },
        }

    def to_representation(self, instance):
        data = super().to_representation(instance)

        # Parse custom_properties to get VOD preference and auto_enable_new_groups settings
        custom_props = instance.custom_properties or {}

        data["enable_vod"] = custom_props.get("enable_vod", False)
        data["auto_enable_new_groups_live"] = custom_props.get("auto_enable_new_groups_live", True)
        data["auto_enable_new_groups_vod"] = custom_props.get("auto_enable_new_groups_vod", True)
        data["auto_enable_new_groups_series"] = custom_props.get("auto_enable_new_groups_series", True)
        return data

    def update(self, instance, validated_data):
        # Handle enable_vod preference and auto_enable_new_groups settings
        enable_vod = validated_data.pop("enable_vod", None)
        auto_enable_new_groups_live = validated_data.pop("auto_enable_new_groups_live", None)
        auto_enable_new_groups_vod = validated_data.pop("auto_enable_new_groups_vod", None)
        auto_enable_new_groups_series = validated_data.pop("auto_enable_new_groups_series", None)

        # Get existing custom_properties
        custom_props = instance.custom_properties or {}

        # Update preferences
        if enable_vod is not None:
            custom_props["enable_vod"] = enable_vod
        if auto_enable_new_groups_live is not None:
            custom_props["auto_enable_new_groups_live"] = auto_enable_new_groups_live
        if auto_enable_new_groups_vod is not None:
            custom_props["auto_enable_new_groups_vod"] = auto_enable_new_groups_vod
        if auto_enable_new_groups_series is not None:
            custom_props["auto_enable_new_groups_series"] = auto_enable_new_groups_series

        validated_data["custom_properties"] = custom_props

        # Pop out channel group memberships so we can handle them manually
        channel_group_data = validated_data.pop("channel_group", [])

        # First, update the M3UAccount itself
        for attr, value in validated_data.items():

@@ -81,13 +223,12 @@ class M3UAccountSerializer(serializers.ModelSerializer):
        # Prepare a list of memberships to update
        memberships_to_update = []
        for group_data in channel_group_data:
            group = group_data.get("channel_group")
            enabled = group_data.get("enabled")

            try:
                membership = ChannelGroupM3UAccount.objects.get(
                    m3u_account=instance, channel_group=group
                )
                membership.enabled = enabled
                memberships_to_update.append(membership)

@@ -96,13 +237,39 @@ class M3UAccountSerializer(serializers.ModelSerializer):

        # Perform the bulk update
        if memberships_to_update:
            ChannelGroupM3UAccount.objects.bulk_update(
                memberships_to_update, ["enabled"]
            )

        return instance

    def create(self, validated_data):
        # Handle enable_vod preference and auto_enable_new_groups settings during creation
        enable_vod = validated_data.pop("enable_vod", False)
        auto_enable_new_groups_live = validated_data.pop("auto_enable_new_groups_live", True)
        auto_enable_new_groups_vod = validated_data.pop("auto_enable_new_groups_vod", True)
        auto_enable_new_groups_series = validated_data.pop("auto_enable_new_groups_series", True)

        # Parse existing custom_properties or create new
        custom_props = validated_data.get("custom_properties", {})

        # Set preferences (default to True for auto_enable_new_groups)
        custom_props["enable_vod"] = enable_vod
        custom_props["auto_enable_new_groups_live"] = auto_enable_new_groups_live
        custom_props["auto_enable_new_groups_vod"] = auto_enable_new_groups_vod
        custom_props["auto_enable_new_groups_series"] = auto_enable_new_groups_series
        validated_data["custom_properties"] = custom_props

        return super().create(validated_data)

    def get_filters(self, obj):
        filters = obj.filters.order_by("order")
        return M3UFilterSerializer(filters, many=True).data
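

# Sketch of the enable_vod round trip: the flag is a write-only serializer field,
# update()/create() fold it into custom_properties, and to_representation() surfaces
# it again. The "account" argument is assumed to be an existing M3UAccount instance.
def _example_enable_vod_round_trip(account):
    serializer = M3UAccountSerializer(account, data={"enable_vod": True}, partial=True)
    serializer.is_valid(raise_exception=True)
    account = serializer.save()
    assert account.custom_properties["enable_vod"] is True
    assert M3UAccountSerializer(account).data["enable_vod"] is True
    return account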


class ServerGroupSerializer(serializers.ModelSerializer):
    """Serializer for Server Group"""

    class Meta:
        model = ServerGroup
        fields = ["id", "name"]

@@ -1,10 +1,13 @@
# apps/m3u/signals.py
from django.db.models.signals import post_save, post_delete, pre_save
from django.dispatch import receiver
from .models import M3UAccount
from .tasks import refresh_single_m3u_account, refresh_m3u_groups, delete_m3u_refresh_task_by_id
from django_celery_beat.models import PeriodicTask, IntervalSchedule
import json
import logging

logger = logging.getLogger(__name__)


@receiver(post_save, sender=M3UAccount)
def refresh_account_on_save(sender, instance, created, **kwargs):

@@ -13,7 +16,7 @@ def refresh_account_on_save(sender, instance, created, **kwargs):
    call a Celery task that fetches & parses that single account
    if it is active or newly created.
    """
    if created and instance.account_type != M3UAccount.Types.XC:
        refresh_m3u_groups.delay(instance.id)


@receiver(post_save, sender=M3UAccount)

@@ -28,21 +31,17 @@ def create_or_update_refresh_task(sender, instance, **kwargs):
        period=IntervalSchedule.HOURS
    )

    # Task should be enabled only if refresh_interval != 0 AND account is active
    should_be_enabled = (instance.refresh_interval != 0) and instance.is_active

    # First check if the task already exists to avoid validation errors
    try:
        task = PeriodicTask.objects.get(name=task_name)
        # Task exists, just update it
        updated_fields = []

        if task.enabled != should_be_enabled:
            task.enabled = should_be_enabled
            updated_fields.append("enabled")

        if task.interval != interval:

@@ -52,11 +51,60 @@ def create_or_update_refresh_task(sender, instance, **kwargs):
        if updated_fields:
            task.save(update_fields=updated_fields)

        # Ensure instance has the task
        if instance.refresh_task_id != task.id:
            M3UAccount.objects.filter(id=instance.id).update(refresh_task=task)

    except PeriodicTask.DoesNotExist:
        # Create new task if it doesn't exist
        refresh_task = PeriodicTask.objects.create(
            name=task_name,
            interval=interval,
            task="apps.m3u.tasks.refresh_single_m3u_account",
            kwargs=json.dumps({"account_id": instance.id}),
            enabled=should_be_enabled,
        )
        M3UAccount.objects.filter(id=instance.id).update(refresh_task=refresh_task)
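

# Sketch of the Celery Beat record this handler maintains for an account. The task
# name format is defined earlier in the function and is not shown in this diff, so
# the lookup below goes through the refresh_task foreign key instead of guessing it.
def _example_inspect_refresh_task(account):
    account.refresh_from_db()
    task = account.refresh_task  # PeriodicTask created/updated above
    assert task.task == "apps.m3u.tasks.refresh_single_m3u_account"
    assert json.loads(task.kwargs) == {"account_id": account.id}
    # Enabled only when refresh_interval != 0 and the account is active.
    assert task.enabled == ((account.refresh_interval != 0) and account.is_active)
    return task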


@receiver(post_delete, sender=M3UAccount)
def delete_refresh_task(sender, instance, **kwargs):
    """
    Delete the associated Celery Beat periodic task when an M3UAccount is deleted.
    """
    try:
        # First try the foreign key relationship to find the task ID
        task = None
        if instance.refresh_task:
            logger.info(f"Found task via foreign key: {instance.refresh_task.id} for M3UAccount {instance.id}")
            task = instance.refresh_task

        # Use the helper function to delete the task
        if task:
            delete_m3u_refresh_task_by_id(instance.id)
        else:
            # Otherwise use the helper function
            delete_m3u_refresh_task_by_id(instance.id)
    except Exception as e:
        logger.error(f"Error in delete_refresh_task signal handler: {str(e)}", exc_info=True)


@receiver(pre_save, sender=M3UAccount)
def update_status_on_active_change(sender, instance, **kwargs):
    """
    When an M3UAccount's is_active field changes, update the status accordingly.
    """
    if instance.pk:  # Only for existing records, not new ones
        try:
            # Get the current record from the database
            old_instance = M3UAccount.objects.get(pk=instance.pk)

            # If is_active changed, update the status
            if old_instance.is_active != instance.is_active:
                if instance.is_active:
                    # When activating, set status to idle
                    instance.status = M3UAccount.Status.IDLE
                else:
                    # When deactivating, set status to disabled
                    instance.status = M3UAccount.Status.DISABLED
        except M3UAccount.DoesNotExist:
            # New record, will use default status
            pass
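

# Sketch of the pre_save behaviour above: toggling is_active on an existing account
# flips the status field before the row is written. "account" is assumed to be a
# saved M3UAccount instance.
def _example_status_follows_is_active(account):
    account.is_active = False
    account.save()
    assert account.status == M3UAccount.Status.DISABLED

    account.is_active = True
    account.save()
    assert account.status == M3UAccount.Status.IDLE
    return account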

apps/m3u/tasks.py (3015 lines changed): file diff suppressed because it is too large.

@@ -1,9 +1,40 @@
# apps/m3u/utils.py
import threading
import logging
from django.db import models

lock = threading.Lock()
# Dictionary to track usage: {m3u_account_id: current_usage}
active_streams_map = {}
logger = logging.getLogger(__name__)


def normalize_stream_url(url):
    """
    Normalize stream URLs for compatibility with FFmpeg.

    Handles VLC-specific syntax like udp://@239.0.0.1:1234 by removing the @ symbol.
    FFmpeg doesn't recognize the @ prefix for multicast addresses.

    Args:
        url (str): The stream URL to normalize

    Returns:
        str: The normalized URL
    """
    if not url:
        return url

    # Handle VLC-style UDP multicast URLs: udp://@239.0.0.1:1234 -> udp://239.0.0.1:1234
    # The @ symbol in VLC means "listen on all interfaces" but FFmpeg doesn't use this syntax
    if url.startswith('udp://@'):
        normalized = url.replace('udp://@', 'udp://', 1)
        logger.debug(f"Normalized VLC-style UDP URL: {url} -> {normalized}")
        return normalized

    # Could add other normalizations here in the future (rtp://@, etc.)
    return url
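

# Quick illustration of the rewrite rule above (only VLC-style multicast URLs change).
def _example_normalize_stream_url():
    assert normalize_stream_url("udp://@239.0.0.1:1234") == "udp://239.0.0.1:1234"
    assert normalize_stream_url("http://example.com/stream.m3u8") == "http://example.com/stream.m3u8"
    assert normalize_stream_url("") == ""  # falsy URLs pass through unchanged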


def increment_stream_count(account):
    with lock:

@@ -24,3 +55,64 @@ def decrement_stream_count(account):
        active_streams_map[account.id] = current_usage
        account.active_streams = current_usage
        account.save(update_fields=['active_streams'])


def calculate_tuner_count(minimum=1, unlimited_default=10):
    """
    Calculate tuner/connection count from active M3U profiles and custom streams.
    This is the centralized function used by both HDHR and XtreamCodes APIs.

    Args:
        minimum (int): Minimum number to return (default: 1)
        unlimited_default (int): Default value when unlimited profiles exist (default: 10)

    Returns:
        int: Calculated tuner/connection count
    """
    try:
        from apps.m3u.models import M3UAccountProfile
        from apps.channels.models import Stream

        # Calculate tuner count from active profiles from active M3U accounts (excluding default "custom Default" profile)
        profiles = M3UAccountProfile.objects.filter(
            is_active=True,
            m3u_account__is_active=True,  # Only include profiles from enabled M3U accounts
        ).exclude(id=1)

        # 1. Check if any profile has unlimited streams (max_streams=0)
        has_unlimited = profiles.filter(max_streams=0).exists()

        # 2. Calculate tuner count from limited profiles
        limited_tuners = 0
        if not has_unlimited:
            limited_tuners = (
                profiles.filter(max_streams__gt=0)
                .aggregate(total=models.Sum("max_streams"))
                .get("total", 0)
                or 0
            )

        # 3. Add custom stream count to tuner count
        custom_stream_count = Stream.objects.filter(is_custom=True).count()
        logger.debug(f"Found {custom_stream_count} custom streams")

        # 4. Calculate final tuner count
        if has_unlimited:
            # If there are unlimited profiles, start with unlimited_default plus custom streams
            tuner_count = unlimited_default + custom_stream_count
        else:
            # Otherwise use the limited profile sum plus custom streams
            tuner_count = limited_tuners + custom_stream_count

        # 5. Ensure minimum number
        tuner_count = max(minimum, tuner_count)

        logger.debug(
            f"Calculated tuner count: {tuner_count} (limited profiles: {limited_tuners}, custom streams: {custom_stream_count}, unlimited: {has_unlimited})"
        )

        return tuner_count

    except Exception as e:
        logger.error(f"Error calculating tuner count: {e}")
        return minimum  # Fallback to minimum value
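

# Worked example of the arithmetic above under assumed data: two limited profiles
# (max_streams 2 and 3, both active, neither the excluded id=1 default), four custom
# streams, and no unlimited profiles.
#   limited_tuners = 2 + 3 = 5
#   tuner_count    = 5 + 4 custom streams = 9, and max(1, 9) = 9
# If any profile had max_streams=0, the limited sum would be ignored and the result
# would be unlimited_default (10) + 4 = 14 instead.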

@@ -3,6 +3,7 @@ from django.views import View
from django.utils.decorators import method_decorator
from django.contrib.auth.decorators import login_required
from django.views.decorators.csrf import csrf_exempt
from django.http import JsonResponse
from apps.m3u.models import M3UAccount
import json

Some files were not shown because too many files have changed in this diff.