Mirror of https://github.com/Dispatcharr/Dispatcharr.git, synced 2026-01-23 02:35:14 +00:00
Smarter logging for file change imports.
This commit is contained in:
parent 67aca64420
commit d5b64a56d6

1 changed file with 26 additions and 17 deletions
```diff
@@ -57,11 +57,6 @@ def scan_and_process_files():
     global _first_scan_completed
     redis_client = RedisClient.get_client()
     now = time.time()
-
-    # Add debug logging for the auto-import setting
-    auto_import_value = CoreSettings.get_auto_import_mapped_files()
-    logger.debug(f"Auto-import mapped files setting value: '{auto_import_value}' (type: {type(auto_import_value).__name__})")
-
     # Check if directories exist
     dirs_exist = all(os.path.exists(d) for d in [M3U_WATCH_DIR, EPG_WATCH_DIR])
     if not dirs_exist:
```
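The surviving context above is the directory guard: the whole scan bails out when a watch folder is missing. A minimal sketch of the same check in isolation; the paths are placeholders, not Dispatcharr's real `M3U_WATCH_DIR` and `EPG_WATCH_DIR` values:

```python
import os

# Placeholder paths; the real values come from Dispatcharr's settings.
M3U_WATCH_DIR = "/data/m3u"
EPG_WATCH_DIR = "/data/epg"

# all() short-circuits, so the scan bails out as soon as one directory is missing.
dirs_exist = all(os.path.exists(d) for d in [M3U_WATCH_DIR, EPG_WATCH_DIR])
if not dirs_exist:
    print("Watch directories missing; skipping this scan pass")
```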
```diff
@@ -101,11 +96,7 @@ def scan_and_process_files():
 
         # File too new — probably still being written
         if age < MIN_AGE_SECONDS:
-            # Use trace level if not first scan
-            if _first_scan_completed:
-                logger.trace(f"Skipping {filename}: Too new (age={age}s)")
-            else:
-                logger.debug(f"Skipping {filename}: Too new (age={age}s)")
+            logger.debug(f"Skipping {filename}: Too new (age={age}s)")
             m3u_skipped += 1
             continue
 
```
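The age check is a write-debounce: a file whose mtime is too fresh may still be mid-copy, so it is skipped and picked up on a later pass. A sketch of how the age would be computed, assuming it derives from the file's mtime (the threshold value here is an assumption, not the module's real constant):

```python
import os
import time

MIN_AGE_SECONDS = 10  # assumed threshold; the real constant lives in the task module

def is_settled(path: str) -> bool:
    # A file counts as settled once its last modification is old enough
    # that a writer is unlikely to still be appending to it.
    age = time.time() - os.path.getmtime(path)
    return age >= MIN_AGE_SECONDS
```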
```diff
@@ -126,6 +117,13 @@ def scan_and_process_files():
 
         redis_client.set(redis_key, mtime, ex=REDIS_TTL)
+
+        # More descriptive creation logging that includes active status
+        if created:
+            if m3u_account.is_active:
+                logger.info(f"Created new M3U account '{filename}' (active)")
+            else:
+                logger.info(f"Created new M3U account '{filename}' (inactive due to auto-import setting)")
 
         if not m3u_account.is_active:
             # Use trace level if not first scan
             if _first_scan_completed:
```
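The `redis_client.set(redis_key, mtime, ex=REDIS_TTL)` context line is the scan's dedup memory: later passes compare the stored mtime to the file's current one to decide whether anything changed. A sketch of that round trip with plain redis-py; the key format and TTL are illustrative, not Dispatcharr's actual values:

```python
import redis

REDIS_TTL = 60 * 60 * 24  # illustrative 24h expiry
client = redis.Redis(decode_responses=True)

def needs_processing(filename: str, mtime: float) -> bool:
    key = f"file_scan:m3u:{filename}"  # hypothetical key format
    stored = client.get(key)
    if stored is not None and float(stored) >= mtime:
        return False  # this version of the file was already handled
    client.set(key, mtime, ex=REDIS_TTL)  # remember it, but let the key expire
    return True
```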
```diff
@@ -135,6 +133,10 @@ def scan_and_process_files():
             m3u_skipped += 1
             continue
+
+        # Log update for existing files (we've already logged creation above)
+        if not created:
+            logger.info(f"Detected update to existing M3U file: {filename}")
 
         logger.info(f"Queueing refresh for M3U file: {filename}")
         refresh_single_m3u_account.delay(m3u_account.id)
         m3u_processed += 1
```
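The `created` flag driving the new create-vs-update messages is the second element of a Django-style `get_or_create` return value. A runnable stand-in for that branching, with a dict playing the role of the database:

```python
# `accounts` stands in for the database table; the tuple mirrors
# Django's get_or_create (object, created) return value.
accounts: dict[str, dict] = {}

def get_or_create(filename: str, active: bool) -> tuple[dict, bool]:
    if filename in accounts:
        return accounts[filename], False
    accounts[filename] = {"name": filename, "is_active": active}
    return accounts[filename], True

account, created = get_or_create("channels.m3u", active=True)
if created:
    state = "active" if account["is_active"] else "inactive due to auto-import setting"
    print(f"Created new M3U account 'channels.m3u' ({state})")
else:
    print("Detected update to existing M3U file: channels.m3u")
```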
```diff
@@ -148,12 +150,12 @@ def scan_and_process_files():
         },
     )
 
-    logger.debug(f"M3U processing complete: {m3u_processed} processed, {m3u_skipped} skipped, {len(m3u_files)} total")
+    logger.trace(f"M3U processing complete: {m3u_processed} processed, {m3u_skipped} skipped, {len(m3u_files)} total")
 
     # Process EPG files
     try:
         epg_files = os.listdir(EPG_WATCH_DIR)
-        logger.debug(f"Found {len(epg_files)} files in EPG directory")
+        logger.trace(f"Found {len(epg_files)} files in EPG directory")
     except Exception as e:
         logger.error(f"Error listing EPG directory: {e}")
         epg_files = []
```
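`logger.trace` implies a log level below DEBUG, which stdlib logging does not ship with (loguru does, out of the box). If the project were on stdlib logging, the level would have to be registered by hand; a sketch of that common pattern, with an assumed level number:

```python
import logging

TRACE = 5  # assumed value; anything below DEBUG (10) works
logging.addLevelName(TRACE, "TRACE")

def _trace(self, message, *args, **kwargs):
    # Mirrors the built-in convenience methods (logger.debug, logger.info, ...).
    if self.isEnabledFor(TRACE):
        self._log(TRACE, message, args, **kwargs)

logging.Logger.trace = _trace

logging.basicConfig(level=TRACE)
logger = logging.getLogger("dispatcharr.file_scan")
logger.trace("M3U processing complete: 0 processed, 0 skipped, 0 total")
```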
```diff
@@ -232,12 +234,15 @@ def scan_and_process_files():
             "is_active": CoreSettings.get_auto_import_mapped_files() in [True, "true", "True"],
         })
-
-        # Add debug logging for created sources
-        if created:
-            logger.info(f"Created new EPG source '{filename}'")
 
         redis_client.set(redis_key, mtime, ex=REDIS_TTL)
+
+        # More descriptive creation logging that includes active status
+        if created:
+            if epg_source.is_active:
+                logger.info(f"Created new EPG source '{filename}' (active)")
+            else:
+                logger.info(f"Created new EPG source '{filename}' (inactive due to auto-import setting)")
 
         if not epg_source.is_active:
            # Use trace level if not first scan
            if _first_scan_completed:
```
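The `in [True, "true", "True"]` membership test folds a setting that may arrive as a boolean or a string into a single boolean. A slightly more defensive sketch of the same normalization (not the project's code):

```python
def auto_import_enabled(raw) -> bool:
    # Booleans pass through; string forms are folded to lowercase so
    # "true", "True", and "TRUE" all count as enabled.
    if isinstance(raw, bool):
        return raw
    return str(raw).strip().lower() == "true"

assert auto_import_enabled(True)
assert auto_import_enabled("True")
assert not auto_import_enabled("false")
```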
```diff
@@ -247,6 +252,10 @@ def scan_and_process_files():
             epg_skipped += 1
             continue
+
+        # Log update for existing files (we've already logged creation above)
+        if not created:
+            logger.info(f"Detected update to existing EPG file: {filename}")
 
         logger.info(f"Queueing refresh for EPG file: {filename}")
         refresh_epg_data.delay(epg_source.id)  # Trigger Celery task
         epg_processed += 1
```
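`refresh_epg_data.delay(epg_source.id)` hands the heavy parsing to a Celery worker rather than running it inline, so the scan loop stays quick. The shape of that handoff in miniature; the broker URL and task body are placeholders:

```python
from celery import Celery

app = Celery("dispatcharr", broker="redis://localhost:6379/0")  # placeholder broker URL

@app.task
def refresh_epg_data(epg_source_id: int) -> None:
    # The real task parses the EPG file and updates the database;
    # this body is a stand-in.
    print(f"Refreshing EPG source {epg_source_id}")

# .delay() serializes the arguments into a broker message; a worker
# picks it up asynchronously, so this call returns immediately.
refresh_epg_data.delay(42)
```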
```diff
@@ -256,7 +265,7 @@ def scan_and_process_files():
             epg_errors += 1
             continue
 
-    logger.debug(f"EPG processing complete: {epg_processed} processed, {epg_skipped} skipped, {epg_errors} errors")
+    logger.trace(f"EPG processing complete: {epg_processed} processed, {epg_skipped} skipped, {epg_errors} errors")
 
     # Mark that the first scan is complete
     _first_scan_completed = True
```
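The module-level `_first_scan_completed` flag is what makes the first pass after startup chatty (DEBUG) while steady-state passes drop to TRACE. The pattern in isolation, as a sketch:

```python
_first_scan_completed = False

def log_skip(logger, filename: str) -> None:
    # The first scan after startup logs at DEBUG so operators see the
    # initial inventory; later periodic scans drop to TRACE.
    if _first_scan_completed:
        logger.trace(f"Skipping {filename}: inactive account")
    else:
        logger.debug(f"Skipping {filename}: inactive account")

def scan_once(logger) -> None:
    global _first_scan_completed
    log_skip(logger, "channels.m3u")
    _first_scan_completed = True  # flip once the full pass has run
```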