logging
commit 9dee262fbf (parent a1fda52260)
1 changed file with 72 additions and 1 deletion
monitor.py (+72 −1)
@@ -8,7 +8,7 @@ import html
 import threading
 import time
 import csv
-from datetime import datetime
+from datetime import datetime, timedelta
 from pathlib import Path
 
 import requests
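The new `timedelta` import drives the age cutoffs used by the cleanup function below. For reference, a minimal standalone check of the cutoff arithmetic and of the log-timestamp format the pruner expects (the sample line is invented for illustration; the regex and format string are the ones from the commit):

```python
from datetime import datetime, timedelta
import re

now = datetime.now()
png_cutoff = now - timedelta(hours=24)  # files with an mtime before this get removed
log_cutoff = now - timedelta(days=7)

# The pruner matches Python logging's default asctime, e.g. "2024-05-01 12:30:00,123 - ..."
sample = "2024-05-01 12:30:00,123 - INFO - hello"
m = re.match(r"^(\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2},\d+)\s+-\s+", sample)
assert m is not None
ts = datetime.strptime(m.group(1), "%Y-%m-%d %H:%M:%S,%f")
print(ts < log_cutoff)  # True for lines older than 7 days
```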
@@ -58,6 +58,65 @@ APPLICATIONS_FILE = DATA_DIR / "applications.json"
 
 # WGcompany specific files
 WGCOMPANY_LISTINGS_FILE = DATA_DIR / "wgcompany_listings.json"
+
+
+def _cleanup_old_files(png_hours: int = 24, log_days: int = 7):
+    """Remove PNG files older than `png_hours` hours and prune log lines older than `log_days` days.
+
+    Runs best-effort and logs exceptions to the logger.
+    """
+    try:
+        now = datetime.now()  # local time, to match st_mtime and the logging timestamps below
+
+        # Remove old PNGs in DATA_DIR
+        png_cutoff = now - timedelta(hours=png_hours)
+        removed_pngs = 0
+        for p in DATA_DIR.glob("*.png"):
+            try:
+                mtime = datetime.fromtimestamp(p.stat().st_mtime)
+                if mtime < png_cutoff:
+                    p.unlink()
+                    removed_pngs += 1
+            except Exception:
+                logger.exception(f"Error while checking/removing PNG: {p}")
+        if removed_pngs:
+            logger.info(f"Removed {removed_pngs} PNG(s) older than {png_hours} hours")
+
+        # Prune logfile lines older than log_days
+        if LOG_FILE.exists():
+            cutoff_log = now - timedelta(days=log_days)
+            kept_lines = []
+            try:
+                with open(LOG_FILE, "r", encoding="utf-8", errors="ignore") as f:
+                    for line in f:
+                        # Expect logging lines starting with 'YYYY-MM-DD HH:MM:SS,ms - '
+                        m = re.match(r"^(\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2},\d+)\s+-\s+", line)
+                        if m:
+                            try:
+                                ts = datetime.strptime(m.group(1), "%Y-%m-%d %H:%M:%S,%f")
+                                if ts >= cutoff_log:
+                                    kept_lines.append(line)
+                            except Exception:
+                                # If parsing fails, keep the line
+                                kept_lines.append(line)
+                        else:
+                            # Keep non-standard lines (continuations, tracebacks)
+                            kept_lines.append(line)
+                # Atomically replace the logfile with the kept lines
+                if kept_lines:
+                    tmp = LOG_FILE.with_suffix(".tmp")
+                    with open(tmp, "w", encoding="utf-8") as f:
+                        f.writelines(kept_lines)
+                    tmp.replace(LOG_FILE)
+                else:
+                    # No recent lines; opening in "w" mode truncates the file
+                    with open(LOG_FILE, "w", encoding="utf-8"):
+                        pass
+                logger.info(f"Pruned logfile, kept {len(kept_lines)} lines from last {log_days} days")
+            except Exception:
+                logger.exception("Error while pruning logfile")
+    except Exception:
+        logger.exception("Unexpected error in cleanup task")
 WGCOMPANY_TIMING_FILE = DATA_DIR / "wgcompany_times.csv"
 
 # Setup logging
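A quick way to exercise `_cleanup_old_files` in isolation is to point it at a throwaway directory. A sketch in pytest style, assuming `DATA_DIR` and `LOG_FILE` are module-level globals in `monitor` that can be monkeypatched (the test name and file names are invented for illustration):

```python
import os
import time

import monitor  # the module this commit touches


def test_cleanup_removes_stale_pngs(tmp_path, monkeypatch):
    monkeypatch.setattr(monitor, "DATA_DIR", tmp_path)
    monkeypatch.setattr(monitor, "LOG_FILE", tmp_path / "monitor.log")

    stale = tmp_path / "old.png"
    fresh = tmp_path / "new.png"
    stale.write_bytes(b"")
    fresh.write_bytes(b"")

    # Backdate the stale file's mtime by 48 hours
    past = time.time() - 48 * 3600
    os.utime(stale, (past, past))

    monitor._cleanup_old_files(png_hours=24, log_days=7)

    assert not stale.exists()
    assert fresh.exists()
```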
|
@ -2082,6 +2141,9 @@ def main():
|
|||
logger.info(f"InBerlin Autopilot: {'ENABLED' if inberlin_monitor.is_autopilot_enabled() else 'DISABLED'}")
|
||||
logger.info(f"WGcompany: {'ENABLED' if WGCOMPANY_ENABLED else 'DISABLED'}")
|
||||
|
||||
# Run periodic cleanup hourly
|
||||
last_cleanup = 0
|
||||
|
||||
while True:
|
||||
# Check InBerlinWohnen
|
||||
try:
|
||||
|
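One design note on the gate above: `time.time()` can jump backwards or forwards if the system clock is adjusted (NTP, DST handling on some platforms), whereas `time.monotonic()` only ever moves forward. A sketch of the same hourly gating with a monotonic clock — an alternative, not what this commit does:

```python
import time

CLEANUP_INTERVAL = 3600  # seconds
# Subtracting the interval forces an immediate first run, like last_cleanup = 0 above
last_cleanup = time.monotonic() - CLEANUP_INTERVAL

while True:
    if time.monotonic() - last_cleanup >= CLEANUP_INTERVAL:
        print("running cleanup")  # stands in for _cleanup_old_files(...)
        last_cleanup = time.monotonic()
    time.sleep(60)  # the real loop does its site checks here
```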
|
@ -2089,6 +2151,15 @@ def main():
|
|||
except Exception as e:
|
||||
logger.error(f"InBerlin check failed: {e}")
|
||||
|
||||
# Periodic cleanup: remove PNGs older than 24h and prune logs older than 7 days
|
||||
try:
|
||||
if time.time() - last_cleanup > 3600: # every hour
|
||||
logger.info("Running periodic cleanup (old PNGs, prune logs)")
|
||||
_cleanup_old_files(png_hours=24, log_days=7)
|
||||
last_cleanup = time.time()
|
||||
except Exception:
|
||||
logger.exception("Cleanup failed")
|
||||
|
||||
# Check WGcompany
|
||||
if wgcompany_monitor:
|
||||
try:
|
||||
|
|
|
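Since `monitor.py` already imports `threading`, the cleanup could also run off the main loop entirely, so a slow prune never delays the site checks. A hedged sketch using a self-re-arming daemon timer — an alternative design, not what this commit implements:

```python
import threading


def _schedule_cleanup(interval_s: float = 3600.0) -> None:
    """Run _cleanup_old_files once per interval on a daemon timer thread."""

    def _run() -> None:
        try:
            _cleanup_old_files(png_hours=24, log_days=7)
        finally:
            _schedule_cleanup(interval_s)  # re-arm even if the cleanup raised

    t = threading.Timer(interval_s, _run)
    t.daemon = True  # don't keep the process alive on shutdown
    t.start()
```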