Files
loogle-scripts/services/telegram-bot/daily_report.py

#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import sqlite3
import os
import datetime
import urllib.request
import urllib.parse
import logging
import subprocess
import tempfile
import time
from logging.handlers import RotatingFileHandler
from typing import Optional, List, Tuple
DEBUG = os.environ.get("DEBUG", "0").strip() == "1"
SCRIPT_DIR = os.path.dirname(os.path.abspath(__file__))
LOG_FILE = os.path.join(SCRIPT_DIR, "daily_report.log")

TELEGRAM_CHAT_IDS = ["64463169", "24827341", "132455422", "5405962012"]
TOKEN_FILE_HOME = os.path.expanduser("~/.telegram_dpc_bot_token")
TOKEN_FILE_ETC = "/etc/telegram_dpc_bot_token"

DB_CANDIDATES = [
    (os.environ.get("SPEEDTEST_DB") or "").strip(),
    "/data/speedtest.sqlite",
    os.path.join(SCRIPT_DIR, "speedtest.sqlite"),
    os.path.join(SCRIPT_DIR, "database.sqlite"),
    os.path.join(SCRIPT_DIR, "data", "speedtest.sqlite"),
    os.path.join(SCRIPT_DIR, "data", "database.sqlite"),
]
SPEEDTEST_CONTAINER = (os.environ.get("SPEEDTEST_CONTAINER") or "speedtest-tracker").strip()
CONTAINER_DB_PATH = (os.environ.get("SPEEDTEST_CONTAINER_DB_PATH") or "/config/database.sqlite").strip()

# Warning thresholds, in Mbps
WARN_DOWN = 400
WARN_UP = 100

# Download/upload units in the DB:
# - default: bytes/s => Mbps = (val * 8) / 1e6
# - if the values are already bits/s: set SPEEDTEST_VALUES_ARE_BITS=1
VALUES_ARE_BITS = os.environ.get("SPEEDTEST_VALUES_ARE_BITS", "0").strip() == "1"

# How many recent rows to read from the DB (the last 24 h are then filtered in Python)
MAX_ROWS = int(os.environ.get("SPEEDTEST_MAX_ROWS", "2000").strip())
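
# Worked example of the unit conversion above (illustrative numbers):
# with the default bytes/s storage, a stored value of 117_500_000 B/s becomes
# (117_500_000 * 8) / 1e6 = 940 Mbps; with SPEEDTEST_VALUES_ARE_BITS=1 the same
# reading would be stored as 940_000_000 bit/s, giving 940_000_000 / 1e6 = 940 Mbps.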

def setup_logger() -> logging.Logger:
    logger = logging.getLogger("daily_report")
    logger.setLevel(logging.DEBUG if DEBUG else logging.INFO)
    logger.handlers.clear()
    fh = RotatingFileHandler(LOG_FILE, maxBytes=1_000_000, backupCount=5, encoding="utf-8")
    fh.setLevel(logging.DEBUG)
    fmt = logging.Formatter("%(asctime)s %(levelname)s %(message)s")
    fh.setFormatter(fmt)
    logger.addHandler(fh)
    if DEBUG:
        sh = logging.StreamHandler()
        sh.setLevel(logging.DEBUG)
        sh.setFormatter(fmt)
        logger.addHandler(sh)
    return logger

LOGGER = setup_logger()

def _read_text_file(path: str) -> str:
    try:
        with open(path, "r", encoding="utf-8") as f:
            return f.read().strip()
    except Exception:
        return ""

def load_bot_token() -> str:
    tok = (os.environ.get("TELEGRAM_BOT_TOKEN") or "").strip()
    if tok:
        return tok
    tok = (os.environ.get("BOT_TOKEN") or "").strip()
    if tok:
        return tok
    tok = _read_text_file(TOKEN_FILE_HOME)
    if tok:
        return tok
    tok = _read_text_file(TOKEN_FILE_ETC)
    return tok.strip() if tok else ""

def send_telegram_message(message: str) -> None:
    if not message:
        return
    bot_token = load_bot_token()
    if not bot_token:
        LOGGER.error("Token Telegram mancante (env/file). Messaggio NON inviato.")
        return
    url = f"https://api.telegram.org/bot{bot_token}/sendMessage"
    for chat_id in TELEGRAM_CHAT_IDS:
        payload = {
            "chat_id": chat_id,
            "text": message,
            "parse_mode": "Markdown",
            "disable_web_page_preview": True,
        }
        try:
            data = urllib.parse.urlencode(payload).encode("utf-8")
            req = urllib.request.Request(url, data=data)
            with urllib.request.urlopen(req, timeout=20) as response:
                if response.status == 200:
                    LOGGER.info("Report inviato a chat_id=%s", chat_id)
                else:
                    LOGGER.error("Telegram HTTP %s chat_id=%s", response.status, chat_id)
            # short pause between consecutive sends
            time.sleep(0.25)
        except Exception as e:
            LOGGER.exception("Errore invio Telegram chat_id=%s: %s", chat_id, e)

def _to_mbps(val) -> float:
    try:
        v = float(val)
    except Exception:
        return 0.0
    if VALUES_ARE_BITS:
        return v / 1_000_000.0
    return (v * 8.0) / 1_000_000.0

def _parse_created_at_utc(created_at) -> Optional[datetime.datetime]:
    """
    Robust parsing:
    - ISO with Z / offset
    - "YYYY-MM-DD HH:MM:SS[.ms]" (assumed UTC if naive, since the DB is usually UTC)
    """
    try:
        if isinstance(created_at, datetime.datetime):
            dt = created_at
        else:
            s = str(created_at).strip()
            if not s:
                return None
            # normalize the trailing Z
            s = s.replace("Z", "+00:00")
            # try ISO first (with or without the "T" separator)
            try:
                dt = datetime.datetime.fromisoformat(s)
            except Exception:
                # fallback: "YYYY-MM-DD HH:MM:SS(.ms)"
                s2 = s.split(".")[0].replace("T", " ")
                dt = datetime.datetime.strptime(s2, "%Y-%m-%d %H:%M:%S")
        if dt.tzinfo is None:
            # assume UTC when the timestamp is naive
            dt = dt.replace(tzinfo=datetime.timezone.utc)
        else:
            dt = dt.astimezone(datetime.timezone.utc)
        return dt
    except Exception:
        return None
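
# Examples of timestamps the parser above handles (illustrative):
#   "2024-05-01T10:30:00Z"        -> 2024-05-01 10:30:00+00:00
#   "2024-05-01 10:30:05"         -> 2024-05-01 10:30:05+00:00 (naive, assumed UTC)
#   "2024-05-01T12:30:00+02:00"   -> 2024-05-01 10:30:00+00:00 (converted to UTC)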

def find_local_db_path() -> str:
    for p in DB_CANDIDATES:
        if not p:
            continue
        if os.path.exists(p) and os.path.isfile(p):
            return p
    return ""

def docker_copy_db_to_temp() -> str:
    try:
        subprocess.run(["docker", "inspect", SPEEDTEST_CONTAINER],
                       stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL, check=True)
    except Exception as e:
        LOGGER.error("Docker/container non disponibile (%s).", e)
        return ""
    try:
        tmpdir = tempfile.mkdtemp(prefix="speedtest_db_")
        dst = os.path.join(tmpdir, "database.sqlite")
    except Exception as e:
        LOGGER.exception("Impossibile creare directory temporanea: %s", e)
        return ""
    src = f"{SPEEDTEST_CONTAINER}:{CONTAINER_DB_PATH}"
    try:
        LOGGER.info("DB non trovato localmente. Copio da Docker: %s -> %s", src, dst)
        subprocess.run(["docker", "cp", src, dst], check=True,
                       stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL)
        if os.path.exists(dst) and os.path.getsize(dst) > 0:
            return dst
        LOGGER.error("Copia Docker riuscita ma file assente/vuoto: %s", dst)
        return ""
    except Exception as e:
        LOGGER.exception("Errore docker cp: %s", e)
        return ""

def generate_report(db_path: str) -> Optional[str]:
    now_utc = datetime.datetime.now(datetime.timezone.utc)
    window_start_utc = now_utc - datetime.timedelta(hours=24)
    try:
        conn = sqlite3.connect(db_path)
        cursor = conn.cursor()
        # NOTE: no SQL filter on created_at (string comparison is fragile).
        # Fetch the last MAX_ROWS completed rows and filter in Python instead.
        query = """
            SELECT download, upload, ping, created_at
            FROM results
            WHERE status = 'completed'
            ORDER BY created_at DESC
            LIMIT ?
        """
        cursor.execute(query, (MAX_ROWS,))
        raw_rows = cursor.fetchall()
    except Exception as e:
        LOGGER.exception("Errore DB (%s): %s", db_path, e)
        return None
    finally:
        try:
            conn.close()
        except Exception:
            pass
    if not raw_rows:
        LOGGER.info("Nessun test trovato.")
        return None

    # Proper datetime filter (last 24 h), then sort ascending
    rows: List[Tuple[datetime.datetime, float, float, float]] = []
    for d_raw, u_raw, ping_raw, created_at in raw_rows:
        dt_utc = _parse_created_at_utc(created_at)
        if not dt_utc:
            continue
        if dt_utc < window_start_utc or dt_utc > now_utc:
            continue
        d_mbps = _to_mbps(d_raw)
        u_mbps = _to_mbps(u_raw)
        try:
            ping_ms = float(ping_raw)
        except Exception:
            ping_ms = 0.0
        rows.append((dt_utc, d_mbps, u_mbps, ping_ms))
    rows.sort(key=lambda x: x[0])
    LOGGER.debug("DB rows read=%s filtered_24h=%s (start=%s now=%s)",
                 len(raw_rows), len(rows),
                 window_start_utc.isoformat(timespec="seconds"),
                 now_utc.isoformat(timespec="seconds"))
    if not rows:
        LOGGER.info("Nessun test nelle ultime 24h dopo filtro datetime.")
        return None

    header = "ORA  | Dn  | Up  | Pg |!"
    sep = "-----+-----+-----+----+-"
    total_down = 0.0
    total_up = 0.0
    count = 0
    issues = 0
    now_local = datetime.datetime.now()
    msg = f"📊 **REPORT VELOCITÀ 24H**\n📅 {now_local.strftime('%d/%m/%Y')}\n\n"
    msg += "```text\n"
    msg += header + "\n"
    msg += sep + "\n"
    for dt_utc, d_mbps, u_mbps, ping_ms in rows:
        total_down += d_mbps
        total_up += u_mbps
        count += 1
        flag = " "
        if d_mbps < WARN_DOWN or u_mbps < WARN_UP:
            issues += 1
            flag = "!"
        time_str = dt_utc.astimezone().strftime("%H:%M")
        msg += f"{time_str:<5}|{int(round(d_mbps)):>5}|{int(round(u_mbps)):>5}|{int(round(ping_ms)):>4}|{flag}\n"
    msg += "```\n"
    avg_d = total_down / count
    avg_u = total_up / count
    icon_d = "" if avg_d >= WARN_DOWN else "⚠️"
    icon_u = "" if avg_u >= WARN_UP else "⚠️"
    msg += f"Ø ⬇️{icon_d}`{avg_d:.0f} Mbps` ⬆️{icon_u}`{avg_u:.0f} Mbps`"
    if issues > 0:
        msg += f"\n\n⚠️ **{issues}** test sotto soglia (!)"
    return msg
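
# A rendered report might look like this (hypothetical numbers, Markdown shown raw),
# assuming the default thresholds WARN_DOWN=400 / WARN_UP=100:
#
#   ORA  | Dn  | Up  | Pg |!
#   -----+-----+-----+----+-
#   07:05|  932|  118|   7|
#   13:05|  356|   96|   9|!
#
#   Ø ⬇️`644 Mbps` ⬆️`107 Mbps`
#   ⚠️ **1** test sotto soglia (!)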

def main() -> None:
    db_path = find_local_db_path()
    if not db_path:
        db_path = docker_copy_db_to_temp()
    if not db_path:
        LOGGER.error("Database non trovato. Locali provati=%s; Docker=%s:%s",
                     [p for p in DB_CANDIDATES if p],
                     SPEEDTEST_CONTAINER,
                     CONTAINER_DB_PATH)
        print("❌ Database non trovato. Vedi daily_report.log.")
        return
    LOGGER.info("Uso database: %s", db_path)
    report = generate_report(db_path)
    if report:
        send_telegram_message(report)
    else:
        LOGGER.info("Nessun report da inviare.")


if __name__ == "__main__":
    main()
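
# Example of scheduling a daily run via cron (illustrative; adjust the interpreter
# path and script location to your installation):
#   0 7 * * * /usr/bin/python3 /opt/loogle-scripts/services/telegram-bot/daily_report.py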