More aggressive garbage collection; better use of the M3U cache file

This commit is contained in:
dekzter 2025-04-06 12:35:43 -04:00
parent ecc96f8b69
commit 702c85e01b
2 changed files with 14 additions and 6 deletions

View file

@ -2,9 +2,9 @@
from django.db.models.signals import post_save, post_delete
from django.dispatch import receiver
from .models import M3UAccount
from .tasks import refresh_single_m3u_account, refresh_m3u_groups
from .tasks import refresh_m3u_groups
from django_celery_beat.models import PeriodicTask, IntervalSchedule
import json
import json, gc
@receiver(post_save, sender=M3UAccount)
def refresh_account_on_save(sender, instance, created, **kwargs):
@ -14,7 +14,11 @@ def refresh_account_on_save(sender, instance, created, **kwargs):
if it is active or newly created.
"""
if created:
refresh_m3u_groups(instance.id)
extinf_data, groups = refresh_m3u_groups(instance.id)
# Aggressive GC since we pulled in the whole file
del extinf_data, groups
gc.collect()
@receiver(post_save, sender=M3UAccount)
def create_or_update_refresh_task(sender, instance, **kwargs):

View file

@ -314,12 +314,12 @@ def refresh_m3u_groups(account_id, use_cache=False):
return extinf_data, groups
@shared_task
def refresh_single_m3u_account(account_id, use_cache=False):
def refresh_single_m3u_account(account_id):
"""Splits M3U processing into chunks and dispatches them as parallel tasks."""
if not acquire_task_lock('refresh_single_m3u_account', account_id):
return f"Task already running for account_id={account_id}."
redis_client = RedisClient.get_client()
# redis_client = RedisClient.get_client()
# Record start time
start_time = time.time()
send_progress_update(0, account_id)
@ -337,7 +337,7 @@ def refresh_single_m3u_account(account_id, use_cache=False):
groups = None
cache_path = os.path.join(m3u_dir, f"{account_id}.json")
if use_cache and os.path.exists(cache_path):
if os.path.exists(cache_path):
with open(cache_path, 'r') as file:
data = json.load(file)
@ -409,6 +409,10 @@ def refresh_single_m3u_account(account_id, use_cache=False):
del existing_groups, extinf_data, groups, batches
gc.collect()
# Clean up cache file since we've fully processed it
if os.path.exists(cache_path):
os.remove(cache_path)
release_task_lock('refresh_single_m3u_account', account_id)
# cursor = 0