#!/usr/bin/env python3
# -*- coding: utf-8 -*-

import argparse
import datetime
import json
import logging
import os
import time
from logging.handlers import RotatingFileHandler
from typing import Dict, List, Optional, Tuple
from zoneinfo import ZoneInfo

import requests
from dateutil import parser
from open_meteo_client import open_meteo_get

# =========================
# CONFIG
# =========================
DEBUG = os.environ.get("DEBUG", "0").strip() == "1"

BASE_DIR = os.path.dirname(os.path.abspath(__file__))
LOG_FILE = os.path.join(BASE_DIR, "nowcast_120m_alert.log")
STATE_FILE = os.path.join(BASE_DIR, "nowcast_120m_state.json")

TZ = "Europe/Berlin"
TZINFO = ZoneInfo(TZ)

# Home (San Marino)
LAT = 43.9356
LON = 12.4296
LOCATION_NAME = "🏠 Casa (Strada Cà Toro)"

# Telegram (multi-chat)
TELEGRAM_CHAT_IDS = ["64463169", "24827341", "132455422", "5405962012"]
TOKEN_FILE_ETC = "/etc/telegram_dpc_bot_token"
TOKEN_FILE_HOME = os.path.expanduser("~/.telegram_dpc_bot_token")

# Open-Meteo
OPEN_METEO_URL = "https://api.open-meteo.com/v1/forecast"
MODEL_AROME = "meteofrance_seamless"
MODEL_ICON_IT = "italia_meteo_arpae_icon_2i"
COMPARISON_THRESHOLD = 0.30  # 30% deviation triggers the model-comparison note

# Evaluation window
WINDOW_MINUTES = 120

# Thresholds / "confirmation"
# Heavy rain: consistent with 25 mm/3h ≈ 8.3 mm/h -> threshold 8.0 mm/h
RAIN_INTENSE_MM_H = 8.0
RAIN_CONFIRM_HOURS = 2  # "confirmed": at least 2 consecutive hours

# Strong wind: gusts >= 62 km/h (yellow alert level, Emilia-Romagna civil protection)
WIND_GUST_STRONG_KMH = 62.0
WIND_CONFIRM_HOURS = 2  # at least 2 consecutive hours

# Snow: accumulation over the next 2 hours >= 2 cm (significant events)
SNOW_ACCUM_2H_CM = 2.0
# Lower threshold to detect the onset of snow (even light snow)
SNOW_ACCUM_2H_LIGHT_CM = 0.3  # 0.3 cm in 2 hours to detect snow onset
# Threshold for persistent snow: total accumulation over 6 hours (even if spread out)
SNOW_ACCUM_6H_PERSISTENT_CM = 0.15  # 0.15 cm in 6 hours for persistent snow
# Weather codes that indicate snow (WMO)
SNOW_WEATHER_CODES = [71, 73, 75, 77, 85, 86]  # light, moderate, heavy snow, snow grains, snow showers

# Anti-spam: minimum interval between identical sends (in minutes)
MIN_RESEND_MINUTES = 180

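# Illustrative only: assuming open_meteo_get() forwards `params` as an ordinary
# query string, the request built by get_forecast() below (as called from main()
# with forecast_days=3) is roughly equivalent to:
#   https://api.open-meteo.com/v1/forecast?latitude=43.9356&longitude=12.4296
#       &timezone=Europe/Berlin&forecast_days=3&models=meteofrance_seamless
#       &wind_speed_unit=kmh&precipitation_unit=mm
#       &hourly=precipitation,windspeed_10m,windgusts_10m,snowfall,weathercode
# This is a sketch of the query shape, not a guarantee of the exact encoding.
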
def setup_logger() -> logging.Logger:
    logger = logging.getLogger("nowcast_120m_alert")
    logger.setLevel(logging.DEBUG if DEBUG else logging.INFO)
    logger.handlers.clear()

    fh = RotatingFileHandler(LOG_FILE, maxBytes=1_000_000, backupCount=5, encoding="utf-8")
    fh.setLevel(logging.DEBUG)
    fmt = logging.Formatter("%(asctime)s %(levelname)s %(message)s")
    fh.setFormatter(fmt)
    logger.addHandler(fh)

    if DEBUG:
        sh = logging.StreamHandler()
        sh.setLevel(logging.DEBUG)
        sh.setFormatter(fmt)
        logger.addHandler(sh)

    return logger


LOGGER = setup_logger()

def now_local() -> datetime.datetime:
    return datetime.datetime.now(TZINFO)


def read_text(path: str) -> str:
    try:
        with open(path, "r", encoding="utf-8") as f:
            return f.read().strip()
    except Exception:
        return ""


def load_bot_token() -> str:
    tok = (os.environ.get("TELEGRAM_BOT_TOKEN") or "").strip()
    if tok:
        return tok
    tok = (os.environ.get("BOT_TOKEN") or "").strip()
    if tok:
        return tok
    tok = read_text(TOKEN_FILE_HOME)
    if tok:
        return tok
    tok = read_text(TOKEN_FILE_ETC)
    return tok.strip() if tok else ""

def telegram_send_markdown(message: str, chat_ids: Optional[List[str]] = None) -> bool:
    """
    Sends only if a message is present. Errors go to the log only.

    Args:
        message: Markdown message to send
        chat_ids: List of chat IDs (default: TELEGRAM_CHAT_IDS)
    """
    if not message:
        return False

    token = load_bot_token()
    if not token:
        LOGGER.error("Token Telegram mancante. Messaggio NON inviato.")
        return False

    if chat_ids is None:
        chat_ids = TELEGRAM_CHAT_IDS

    url = f"https://api.telegram.org/bot{token}/sendMessage"
    payload_base = {
        "text": message,
        "parse_mode": "Markdown",
        "disable_web_page_preview": True,
    }

    ok_any = False
    with requests.Session() as s:
        for chat_id in chat_ids:
            payload = dict(payload_base)
            payload["chat_id"] = chat_id
            try:
                r = s.post(url, json=payload, timeout=20)
                if r.status_code == 200:
                    ok_any = True
                else:
                    LOGGER.error("Telegram HTTP %s chat_id=%s body=%s", r.status_code, chat_id, r.text[:300])
                time.sleep(0.25)
            except Exception as e:
                LOGGER.exception("Errore invio Telegram chat_id=%s: %s", chat_id, e)

    return ok_any

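# Illustrative example of the JSON body POSTed to the Bot API per chat
# (message text is made up):
#   {
#       "text": "⚠️ *ALLERTA METEO* ...",
#       "parse_mode": "Markdown",
#       "disable_web_page_preview": True,
#       "chat_id": "64463169"
#   }
# The function returns True if at least one chat accepted the message.
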
def parse_time_local(t: str) -> datetime.datetime:
    dt = parser.isoparse(t)
    if dt.tzinfo is None:
        return dt.replace(tzinfo=TZINFO)
    return dt.astimezone(TZINFO)

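# Example (illustrative): Open-Meteo returns naive local timestamps such as
# "2025-01-15T14:00"; parse_time_local() attaches Europe/Berlin to them, so they
# compare correctly against now_local() and the 120-minute window bounds.
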
def get_forecast(model: str, use_minutely: bool = True, forecast_days: int = 2) -> Optional[Dict]:
    """
    Fetches the forecast. If use_minutely=True and the model is AROME, it also requests
    minutely_15 for 15-minute detail over the next 48 hours.
    If minutely_15 fails or has too many gaps, it automatically retries without minutely_15.

    Args:
        model: Weather model to use
        use_minutely: If True, include minutely_15 data for AROME
        forecast_days: Number of forecast days (default: 2 for 48h)
    """
    params = {
        "latitude": LAT,
        "longitude": LON,
        "timezone": TZ,
        "forecast_days": forecast_days,
        "models": model,
        "wind_speed_unit": "kmh",
        "precipitation_unit": "mm",
        "hourly": ",".join([
            "precipitation",
            "windspeed_10m",
            "windgusts_10m",
            "snowfall",
            "weathercode",  # added to detect snow even when snowfall is low
        ]),
    }

    # Add minutely_15 for AROME Seamless (15-minute detail).
    # If it fails, retry without minutely_15.
    if use_minutely and model == MODEL_AROME:
        params["minutely_15"] = "snowfall,precipitation_probability,precipitation,rain,temperature_2m,wind_speed_10m,wind_direction_10m"

    try:
        r = open_meteo_get(OPEN_METEO_URL, params=params, timeout=(5, 25))
        if r.status_code == 400:
            # On a 400 with minutely_15 in the request, retry without it
            if "minutely_15" in params and model == MODEL_AROME:
                LOGGER.warning("Open-Meteo 400 con minutely_15 (model=%s), riprovo senza minutely_15", model)
                params_no_minutely = params.copy()
                del params_no_minutely["minutely_15"]
                try:
                    r2 = open_meteo_get(OPEN_METEO_URL, params=params_no_minutely, timeout=(5, 25))
                    if r2.status_code == 200:
                        return r2.json()
                except Exception:
                    pass
            try:
                j = r.json()
                LOGGER.error("Open-Meteo 400 (model=%s): %s", model, j.get("reason", j))
            except Exception:
                LOGGER.error("Open-Meteo 400 (model=%s): %s", model, r.text[:300])
            return None
        elif r.status_code == 504:
            # Gateway Timeout: if minutely_15 was requested, retry without it
            if "minutely_15" in params and model == MODEL_AROME:
                LOGGER.warning("Open-Meteo 504 Gateway Timeout con minutely_15 (model=%s), riprovo senza minutely_15", model)
                params_no_minutely = params.copy()
                del params_no_minutely["minutely_15"]
                try:
                    r2 = open_meteo_get(OPEN_METEO_URL, params=params_no_minutely, timeout=(5, 25))
                    if r2.status_code == 200:
                        return r2.json()
                except Exception:
                    pass
            LOGGER.error("Open-Meteo 504 Gateway Timeout (model=%s)", model)
            return None
        r.raise_for_status()
        data = r.json()

        # Check whether minutely_15 has gaps (even a single None => fall back to hourly)
        if "minutely_15" in params and model == MODEL_AROME:
            minutely = data.get("minutely_15", {}) or {}
            minutely_times = minutely.get("time", []) or []
            minutely_precip = minutely.get("precipitation", []) or []
            minutely_snow = minutely.get("snowfall", []) or []

            # Look for gaps (even a single None)
            if minutely_times:
                # Check the main parameters for gaps
                has_holes = False
                # Check precipitation
                if minutely_precip and any(v is None for v in minutely_precip):
                    has_holes = True
                # Check snowfall
                if minutely_snow and any(v is None for v in minutely_snow):
                    has_holes = True

                if has_holes:
                    LOGGER.warning("minutely_15 ha buchi (valori None rilevati, model=%s), riprovo senza minutely_15", model)
                    params_no_minutely = params.copy()
                    del params_no_minutely["minutely_15"]
                    try:
                        r2 = open_meteo_get(OPEN_METEO_URL, params=params_no_minutely, timeout=(5, 25))
                        if r2.status_code == 200:
                            return r2.json()
                    except Exception:
                        pass

        return data
    except requests.exceptions.Timeout:
        # Timeout: if minutely_15 was requested, retry without it
        if "minutely_15" in params and model == MODEL_AROME:
            LOGGER.warning("Open-Meteo Timeout con minutely_15 (model=%s), riprovo senza minutely_15", model)
            params_no_minutely = params.copy()
            del params_no_minutely["minutely_15"]
            try:
                r2 = open_meteo_get(OPEN_METEO_URL, params=params_no_minutely, timeout=(5, 25))
                if r2.status_code == 200:
                    return r2.json()
            except Exception:
                pass
        LOGGER.exception("Open-Meteo timeout (model=%s)", model)
        return None
    except Exception as e:
        # Other errors: if minutely_15 was requested, retry without it
        if "minutely_15" in params and model == MODEL_AROME:
            LOGGER.warning("Open-Meteo error con minutely_15 (model=%s): %s, riprovo senza minutely_15", model, str(e))
            params_no_minutely = params.copy()
            del params_no_minutely["minutely_15"]
            try:
                r2 = open_meteo_get(OPEN_METEO_URL, params=params_no_minutely, timeout=(5, 25))
                if r2.status_code == 200:
                    return r2.json()
            except Exception:
                pass
        LOGGER.exception("Errore chiamata Open-Meteo (model=%s): %s", model, e)
        return None

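# Illustrative shape of the payload the rest of the script relies on (trimmed,
# example values only; see the Open-Meteo docs for the full response):
#   {
#       "hourly": {
#           "time": ["2025-01-15T14:00", "2025-01-15T15:00", ...],
#           "precipitation": [0.0, 2.4, ...],       # mm/h
#           "windgusts_10m": [35.0, 64.0, ...],     # km/h
#           "snowfall": [0.0, 0.7, ...],            # cm/h
#           "weathercode": [3, 73, ...],            # WMO codes
#       },
#       "minutely_15": {"time": [...], "precipitation": [...], "snowfall": [...], ...}
#   }
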
def find_precise_start_minutely(
    minutely_data: Dict,
    param_name: str,
    threshold: float,
    window_start: datetime.datetime,
    window_end: datetime.datetime,
    confirm_intervals: int = 2  # 2 intervals of 15 min = 30 min of confirmation
) -> Optional[Dict]:
    """
    Finds the precise onset using minutely_15 data (15-minute resolution).

    Returns:
        {
            "start": datetime,
            "start_precise": str (HH:MM),
            "value_at_start": float,
            "confirmed": bool
        } or None
    """
    minutely = minutely_data.get("minutely_15", {}) or {}
    times = minutely.get("time", []) or []
    values = minutely.get(param_name, []) or []

    if not times or not values:
        return None

    for i, (t_str, val) in enumerate(zip(times, values)):
        try:
            dt = parse_time_local(t_str)
            if dt < window_start or dt > window_end:
                continue

            val_float = float(val) if val is not None else 0.0

            if val_float >= threshold:
                # Require confirmation (at least confirm_intervals consecutive slots)
                confirmed = True
                if i + confirm_intervals - 1 < len(values):
                    for k in range(1, confirm_intervals):
                        next_val = float(values[i + k]) if i + k < len(values) and values[i + k] is not None else 0.0
                        if next_val < threshold:
                            confirmed = False
                            break
                else:
                    confirmed = False

                if confirmed:
                    return {
                        "start": dt,
                        "start_precise": dt.strftime("%H:%M"),
                        "value_at_start": val_float,
                        "confirmed": confirmed
                    }
        except Exception:
            continue

    return None

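# Worked example (illustrative values): with threshold=8.0 and confirm_intervals=2,
# a 15-minute precipitation series of [2.0, 9.1, 8.4, 1.0, ...] inside the window
# is reported as starting at the slot holding 9.1 (two consecutive slots >= 8.0),
# while [2.0, 9.1, 3.0, ...] is not confirmed and the scan moves on.
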
def analyze_snowfall_event(
    times: List[str],
    snowfall: List[float],
    weathercode: List[int],
    start_idx: int,
    max_hours: int = 48
) -> Optional[Dict]:
    """
    Analyzes a full snowfall event starting from start_idx.

    Computes:
    - Total duration (consecutive hours with snow)
    - Total accumulation (sum of all snowfall > 0)
    - Start and end times

    Args:
        times: List of timestamps
        snowfall: List of snowfall values (already in cm)
        weathercode: List of weather codes
        start_idx: Index where the snowfall starts
        max_hours: Maximum number of hours to analyze (default: 48)

    Returns:
        Dict with:
        - duration_hours: duration in hours
        - total_accumulation_cm: total accumulation in cm
        - start_time: start datetime
        - end_time: end datetime (or None if it continues past max_hours)
        - is_ongoing: True if it continues past max_hours
    """
    if start_idx >= len(times):
        return None

    start_dt = parse_time_local(times[start_idx])
    end_idx = start_idx
    total_accum = 0.0
    duration = 0

    # Analyze up to max_hours ahead, or until the data runs out
    max_idx = min(start_idx + max_hours, len(times))

    for i in range(start_idx, max_idx):
        snow_val = snowfall[i] if i < len(snowfall) and snowfall[i] is not None else 0.0
        code = weathercode[i] if i < len(weathercode) and weathercode[i] is not None else None

        # Treat as snow if: snowfall > 0 OR the weather code indicates snow
        is_snow = (snow_val > 0.0) or (code in SNOW_WEATHER_CODES)

        if is_snow:
            duration += 1
            total_accum += snow_val
            end_idx = i
        else:
            # On a break, keep scanning (it may be temporary),
            # but if the break lasts 2 hours or more, consider the snowfall over
            pause_hours = 0
            for j in range(i, min(i + 3, max_idx)):
                next_snow = snowfall[j] if j < len(snowfall) and snowfall[j] is not None else 0.0
                next_code = weathercode[j] if j < len(weathercode) and weathercode[j] is not None else None
                if (next_snow > 0.0) or (next_code in SNOW_WEATHER_CODES):
                    break
                pause_hours += 1

            # If the break is 2 hours or more, stop the analysis
            if pause_hours >= 2:
                break

    end_dt = parse_time_local(times[end_idx]) if end_idx < len(times) else None
    is_ongoing = (end_idx >= max_idx - 1) and (end_idx < len(times) - 1)

    return {
        "duration_hours": duration,
        "total_accumulation_cm": total_accum,
        "start_time": start_dt,
        "end_time": end_dt,
        "is_ongoing": is_ongoing,
        "start_idx": start_idx,
        "end_idx": end_idx
    }

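# Illustrative result: for a forecast where snow runs from 06:00 to 08:00 local
# time with hourly snowfall of 0.4, 1.1 and 0.6 cm, the function returns roughly:
#   {"duration_hours": 3, "total_accumulation_cm": 2.1,
#    "start_time": <06:00>, "end_time": <08:00>, "is_ongoing": False,
#    "start_idx": 6, "end_idx": 8}
# (indices assume the hourly series starts at midnight; values are made up).
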
def find_snowfall_start(
    times: List[str],
    snowfall: List[float],
    weathercode: List[int],
    window_start: datetime.datetime,
    window_end: datetime.datetime
) -> Optional[int]:
    """
    Finds the start of a snowfall within the time window.

    A snowfall starts when:
    - snowfall > 0 OR the weather code indicates snow (71, 73, 75, 77, 85, 86)

    Returns:
        Index of the first timestamp with snow, or None
    """
    for i, t_str in enumerate(times):
        try:
            dt = parse_time_local(t_str)
            if dt < window_start or dt > window_end:
                continue

            snow_val = snowfall[i] if i < len(snowfall) and snowfall[i] is not None else 0.0
            code = weathercode[i] if i < len(weathercode) and weathercode[i] is not None else None

            # Detect snow onset
            if (snow_val > 0.0) or (code in SNOW_WEATHER_CODES):
                return i
        except Exception:
            continue

    return None

def compare_values(arome_val: float, icon_val: float) -> Optional[Dict]:
    """Compares two values and returns info if they differ by more than 30%."""
    if arome_val == 0 and icon_val == 0:
        return None

    if arome_val > 0:
        diff_pct = abs(icon_val - arome_val) / arome_val
    elif icon_val > 0:
        diff_pct = abs(arome_val - icon_val) / icon_val
    else:
        return None

    if diff_pct > COMPARISON_THRESHOLD:
        return {
            "diff_pct": diff_pct * 100,
            "arome": arome_val,
            "icon": icon_val
        }
    return None

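# Worked example: compare_values(10.0, 6.0) -> |6.0 - 10.0| / 10.0 = 0.40, i.e. a
# 40% deviation, which exceeds COMPARISON_THRESHOLD (0.30), so the function
# returns {"diff_pct": 40.0, "arome": 10.0, "icon": 6.0}.
# compare_values(10.0, 8.0) is only a 20% deviation and returns None.
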
def load_state() -> Dict:
    if os.path.exists(STATE_FILE):
        try:
            with open(STATE_FILE, "r", encoding="utf-8") as f:
                return json.load(f) or {}
        except Exception:
            return {}
    return {
        "active_events": {},
        "last_sent_utc": ""
    }


def save_state(state: Dict) -> None:
    try:
        with open(STATE_FILE, "w", encoding="utf-8") as f:
            json.dump(state, f, ensure_ascii=False)
    except Exception:
        pass

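# Illustrative content of nowcast_120m_state.json (example values):
#   {
#       "active_events": {
#           "WIND": [
#               {"start_time": "2025-01-15T15:00:00+01:00",
#                "end_time": "2025-01-15T21:00:00+01:00",
#                "first_alerted": "2025-01-15T13:05:12+00:00",
#                "type": "WIND", "is_ongoing": false}
#           ]
#       },
#       "last_sent_utc": "2025-01-15T13:05:12+00:00"
#   }
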
def is_event_already_active(
    active_events: Dict,
    event_type: str,
    start_time: datetime.datetime,
    tolerance_hours: float = 2.0
) -> bool:
    """
    Checks whether an event with the same start is already active.

    Args:
        active_events: Dict of active events by type
        event_type: Event type ("SNOW", "RAIN", "WIND")
        start_time: Start timestamp of the event
        tolerance_hours: Tolerance in hours for treating it as the same event

    Returns:
        True if the event is already active
    """
    events_of_type = active_events.get(event_type, [])

    for event in events_of_type:
        try:
            event_start_str = event.get("start_time", "")
            if not event_start_str:
                continue

            event_start = datetime.datetime.fromisoformat(event_start_str)
            # Normalize timezone
            if event_start.tzinfo is None:
                event_start = event_start.replace(tzinfo=TZINFO)
            else:
                event_start = event_start.astimezone(TZINFO)

            # Normalize start_time
            if start_time.tzinfo is None:
                start_time = start_time.replace(tzinfo=TZINFO)
            else:
                start_time = start_time.astimezone(TZINFO)

            # Check whether the start falls within the tolerance
            time_diff = abs((start_time - event_start).total_seconds() / 3600.0)
            if time_diff <= tolerance_hours:
                return True
        except Exception:
            continue

    return False

def add_active_event(
    active_events: Dict,
    event_type: str,
    start_time: datetime.datetime,
    end_time: Optional[datetime.datetime] = None,
    is_ongoing: bool = False
) -> None:
    """
    Adds an active event to the state.
    """
    if event_type not in active_events:
        active_events[event_type] = []

    # Normalize timezone
    if start_time.tzinfo is None:
        start_time = start_time.replace(tzinfo=TZINFO)
    else:
        start_time = start_time.astimezone(TZINFO)

    event = {
        "start_time": start_time.isoformat(),
        "first_alerted": datetime.datetime.now(datetime.timezone.utc).isoformat(),
        "type": event_type,
        "is_ongoing": is_ongoing
    }

    if end_time:
        if end_time.tzinfo is None:
            end_time = end_time.replace(tzinfo=TZINFO)
        else:
            end_time = end_time.astimezone(TZINFO)
        event["end_time"] = end_time.isoformat()

    active_events[event_type].append(event)

def cleanup_ended_events(
    active_events: Dict,
    now: datetime.datetime
) -> None:
    """
    Removes ended events from the state (without sending end-of-event notifications).

    An event is considered ended if:
    - It has an end_time in the past AND it is not ongoing
    - Or the event is older than 48 hours (safety cleanup)
    """
    if now.tzinfo is None:
        now = now.replace(tzinfo=TZINFO)
    else:
        now = now.astimezone(TZINFO)

    for event_type in list(active_events.keys()):
        events = active_events[event_type]
        kept_events = []

        for event in events:
            try:
                start_time_str = event.get("start_time", "")
                if not start_time_str:
                    continue

                start_time = datetime.datetime.fromisoformat(start_time_str)
                if start_time.tzinfo is None:
                    start_time = start_time.replace(tzinfo=TZINFO)
                else:
                    start_time = start_time.astimezone(TZINFO)

                # Safety cleanup: drop events older than 48 hours
                age_hours = (now - start_time).total_seconds() / 3600.0
                if age_hours > 48:
                    LOGGER.debug("Rimosso evento %s vecchio di %.1f ore (cleanup)", event_type, age_hours)
                    continue

                # Check whether the event has ended
                end_time_str = event.get("end_time")
                is_ongoing = event.get("is_ongoing", False)

                if end_time_str and not is_ongoing:
                    end_time = datetime.datetime.fromisoformat(end_time_str)
                    if end_time.tzinfo is None:
                        end_time = end_time.replace(tzinfo=TZINFO)
                    else:
                        end_time = end_time.astimezone(TZINFO)

                    # If end_time is in the past, drop the event
                    if end_time < now:
                        LOGGER.debug("Rimosso evento %s terminato alle %s", event_type, end_time_str)
                        continue

                # Keep the event
                kept_events.append(event)
            except Exception as e:
                LOGGER.debug("Errore cleanup evento %s: %s", event_type, e)
                continue

        active_events[event_type] = kept_events

def find_confirmed_start(
    times: List[str],
    cond: List[bool],
    confirm_hours: int,
    window_start: datetime.datetime,
    window_end: datetime.datetime
) -> Optional[int]:
    """
    Finds the first index i such that:
    - time[i] falls within [window_start, window_end]
    - cond[i..i+confirm_hours-1] are all True
    """
    n = len(times)
    for i in range(n):
        try:
            dt = parse_time_local(times[i])
        except Exception:
            continue
        if dt < window_start or dt > window_end:
            continue
        if i + confirm_hours - 1 >= n:
            continue
        ok = True
        for k in range(confirm_hours):
            if not cond[i + k]:
                ok = False
                break
        if ok:
            return i
    return None

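# Worked example (illustrative): with confirm_hours=2 and
#   cond = [False, True, True, False, ...]
# for hours that all fall inside the window, the function returns index 1:
# hour 1 and hour 2 both satisfy the condition, so hour 1 is the confirmed start.
# A single isolated True (e.g. [False, True, False, ...]) is never confirmed.
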
def main(chat_ids: Optional[List[str]] = None, debug_mode: bool = False) -> None:
    LOGGER.info("--- Nowcast 120m alert ---")

    # Load state and initialize active_events
    state = load_state()
    active_events = state.get("active_events", {})

    # Extend the forecast to 3 days to get a full 48h of snow analysis
    data_arome = get_forecast(MODEL_AROME, forecast_days=3)
    if not data_arome:
        return

    hourly_arome = data_arome.get("hourly", {}) or {}
    times = hourly_arome.get("time", []) or []
    precip_arome = hourly_arome.get("precipitation", []) or []
    gust_arome = hourly_arome.get("windgusts_10m", []) or []
    snow_arome = hourly_arome.get("snowfall", []) or []
    weathercode_arome = hourly_arome.get("weathercode", []) or []  # to detect snow even when snowfall is low

    # Fetch ICON Italia data for comparison (48h)
    data_icon = get_forecast(MODEL_ICON_IT, use_minutely=False, forecast_days=3)
    hourly_icon = (data_icon.get("hourly", {}) or {}) if data_icon else {}
    precip_icon = hourly_icon.get("precipitation", []) or []
    gust_icon = hourly_icon.get("windgusts_10m", []) or []
    snow_icon = hourly_icon.get("snowfall", []) or []
    weathercode_icon = hourly_icon.get("weathercode", []) or []

    if not times:
        LOGGER.error("Open-Meteo: hourly.time mancante/vuoto")
        return

    now = now_local()
    # Window for detecting onsets: the next 2 hours
    window_start = now
    window_end = now + datetime.timedelta(minutes=WINDOW_MINUTES)

    # Access helper: arrays may be shorter than times or contain None
    n = len(times)

    def val(arr, i, cast=float) -> float:
        try:
            v = arr[i]
            return cast(v) if v is not None else 0.0
        except Exception:
            return 0.0

    rain_cond = [(val(precip_arome, i) >= RAIN_INTENSE_MM_H) for i in range(n)]
    wind_cond = [(val(gust_arome, i) >= WIND_GUST_STRONG_KMH) for i in range(n)]

    # Snow: detect the snowfall onset, then analyze the full event (48h)
    snow_start_i = find_snowfall_start(times, snow_arome, weathercode_arome, window_start, window_end)

    rain_i = find_confirmed_start(times, rain_cond, RAIN_CONFIRM_HOURS, window_start, window_end)
    wind_i = find_confirmed_start(times, wind_cond, WIND_CONFIRM_HOURS, window_start, window_end)

    if DEBUG:
        LOGGER.debug("window now=%s end=%s model=%s", now.isoformat(timespec="minutes"), window_end.isoformat(timespec="minutes"), MODEL_AROME)
        LOGGER.debug("rain_start=%s wind_start=%s snow_start=%s", rain_i, wind_i, snow_start_i)

    alerts: List[str] = []
    sig_parts: List[str] = []
    comparisons: Dict[str, Dict] = {}  # alert type -> comparison info

    # Use minutely_15 to pin down the precise onset (when available)
    minutely_arome = data_arome.get("minutely_15", {}) or {}
    minutely_available = bool(minutely_arome.get("time"))

    # Heavy rain
    if rain_i is not None:
        start_dt = parse_time_local(times[rain_i])

        # If minutely_15 is available, pin down the precise onset (15-minute resolution)
        precise_start = None
        if minutely_available:
            precise_start = find_precise_start_minutely(
                minutely_arome, "precipitation", RAIN_INTENSE_MM_H, window_start, window_end, confirm_intervals=2
            )
            if precise_start:
                start_dt = precise_start["start"]

        # Peak within the window
        max_r_arome = 0.0
        for i in range(n):
            dt = parse_time_local(times[i])
            if dt < window_start or dt > window_end:
                continue
            max_r_arome = max(max_r_arome, val(precip_arome, i))

        # ICON peak, if available
        max_r_icon = 0.0
        if len(precip_icon) >= n:
            for i in range(n):
                dt = parse_time_local(times[i])
                if dt < window_start or dt > window_end:
                    continue
                max_r_icon = max(max_r_icon, val(precip_icon, i))

        # Model comparison
        comp_rain = compare_values(max_r_arome, max_r_icon) if max_r_icon > 0 else None
        if comp_rain:
            comparisons["rain"] = comp_rain

        start_time_str = precise_start["start_precise"] if precise_start else start_dt.strftime('%H:%M')
        detail_note = " (dettaglio 15 min)" if precise_start else ""

        alert_text = (
            f"🌧️ *PIOGGIA INTENSA*\n"
            f"Inizio confermato: `{start_time_str}`{detail_note} (≥ {RAIN_INTENSE_MM_H:.0f} mm/h per {RAIN_CONFIRM_HOURS}h)\n"
            f"Picco stimato (entro {WINDOW_MINUTES}m): `{max_r_arome:.1f} mm/h`"
        )
        if comp_rain:
            alert_text += f"\n⚠️ *Discordanza modelli*: AROME `{max_r_arome:.1f}` mm/h | ICON `{max_r_icon:.1f}` mm/h (scostamento {comp_rain['diff_pct']:.0f}%)"
        alerts.append(alert_text)
        sig_parts.append(f"RAIN@{start_dt.strftime('%Y%m%d%H%M')}/peak{max_r_arome:.1f}")

    # Strong wind (gusts)
    if wind_i is not None:
        start_dt = parse_time_local(times[wind_i])

        # Check whether the event is already active
        is_already_active = is_event_already_active(active_events, "WIND", start_dt, tolerance_hours=2.0)

        if is_already_active:
            LOGGER.info("Evento vento già attivo (inizio: %s), non invio notifica", start_dt.strftime('%Y-%m-%d %H:%M'))
        else:
            max_g_arome = 0.0
            for i in range(n):
                dt = parse_time_local(times[i])
                if dt < window_start or dt > window_end:
                    continue
                max_g_arome = max(max_g_arome, val(gust_arome, i))

            # ICON peak, if available
            max_g_icon = 0.0
            if len(gust_icon) >= n:
                for i in range(n):
                    dt = parse_time_local(times[i])
                    if dt < window_start or dt > window_end:
                        continue
                    max_g_icon = max(max_g_icon, val(gust_icon, i))

            # Model comparison
            comp_wind = compare_values(max_g_arome, max_g_icon) if max_g_icon > 0 else None
            if comp_wind:
                comparisons["wind"] = comp_wind

            alert_text = (
                f"💨 *VENTO FORTE*\n"
                f"Inizio confermato: `{start_dt.strftime('%H:%M')}` (raffiche ≥ {WIND_GUST_STRONG_KMH:.0f} km/h per {WIND_CONFIRM_HOURS}h)\n"
                f"Raffica max stimata (entro {WINDOW_MINUTES}m): `{max_g_arome:.0f} km/h`"
            )
            if comp_wind:
                alert_text += f"\n⚠️ *Discordanza modelli*: AROME `{max_g_arome:.0f}` km/h | ICON `{max_g_icon:.0f}` km/h (scostamento {comp_wind['diff_pct']:.0f}%)"
            alerts.append(alert_text)
            sig_parts.append(f"WIND@{start_dt.strftime('%Y%m%d%H%M')}")

            # Record the active event in the state (estimated end: 6 hours after the start)
            estimated_end = start_dt + datetime.timedelta(hours=6)
            add_active_event(active_events, "WIND", start_dt, estimated_end, is_ongoing=False)

    # Snow: analyze the full event (48h)
    if snow_start_i is not None:
        # Analyze the whole snowfall
        snow_event = analyze_snowfall_event(times, snow_arome, weathercode_arome, snow_start_i, max_hours=48)

        if snow_event:
            start_dt = snow_event["start_time"]
            total_accum_cm = snow_event["total_accumulation_cm"]
            duration_hours = snow_event["duration_hours"]
            end_dt = snow_event["end_time"]
            is_ongoing = snow_event["is_ongoing"]

            # Determine the severity from the total accumulation
            is_significant = total_accum_cm >= SNOW_ACCUM_2H_CM
            severity_emoji = "❄️" if is_significant else "🌨️"
            severity_text = "NEVE SIGNIFICATIVA" if is_significant else "NEVE"

            # If minutely_15 is available, pin down the precise onset
            precise_start_snow = None
            if minutely_available:
                precise_start_snow = find_precise_start_minutely(
                    minutely_arome, "snowfall", 0.01, window_start, window_end, confirm_intervals=1
                )

            start_time_str = precise_start_snow["start_precise"] if precise_start_snow else start_dt.strftime('%H:%M')
            detail_note = " (dettaglio 15 min)" if precise_start_snow else ""

            # ICON accumulation for comparison
            if data_icon and snow_start_i < len(snow_icon):
                icon_event = analyze_snowfall_event(times, snow_icon, weathercode_icon, snow_start_i, max_hours=48)
                icon_accum_cm = icon_event["total_accumulation_cm"] if icon_event else 0.0
                comp_snow = compare_values(total_accum_cm, icon_accum_cm) if icon_accum_cm > 0 else None
            else:
                comp_snow = None

            if comp_snow:
                comparisons["snow"] = comp_snow

            # Build the message with duration and total accumulation
            end_time_str = end_dt.strftime('%H:%M') if end_dt and not is_ongoing else "in corso"
            duration_text = f"{duration_hours}h" if duration_hours > 0 else "<1h"

            alert_text = (
                f"{severity_emoji} *{severity_text}*\n"
                f"Inizio: `{start_time_str}`{detail_note}\n"
                f"Durata prevista: `{duration_text}`"
            )

            if end_dt and not is_ongoing:
                alert_text += f" (fino alle `{end_time_str}`)"
            elif is_ongoing:
                alert_text += " (continua oltre 48h)"

            alert_text += f"\nAccumulo totale previsto: `{total_accum_cm:.2f} cm`"

            if comp_snow:
                alert_text += f"\n⚠️ *Discordanza modelli*: AROME `{total_accum_cm:.2f}` cm | ICON `{icon_accum_cm:.2f}` cm (scostamento {comp_snow['diff_pct']:.0f}%)"

            alerts.append(alert_text)
            sig_parts.append(f"SNOW@{start_dt.strftime('%Y%m%d%H%M')}/dur{duration_hours}h/acc{total_accum_cm:.1f}")

    signature = "|".join(sig_parts)

    # If there are no new events, send nothing (no end-of-event notifications)
    if not alerts:
        if debug_mode:
            # In debug mode, build an informational message even without alerts
            LOGGER.info("[DEBUG MODE] Nessuna allerta, ma creo messaggio informativo")
            alerts.append("ℹ️ _Nessuna allerta confermata entro %s minuti._" % WINDOW_MINUTES)
            sig_parts.append("NO_ALERT")
        else:
            LOGGER.info("Nessuna allerta confermata entro %s minuti.", WINDOW_MINUTES)
            # Save the updated state (with cleaned-up events) even when nothing is sent
            state["active_events"] = active_events
            save_state(state)
            return

    # Anti-spam: only check when there are new events
    last_sent = state.get("last_sent_utc", "")
    last_sent_dt = None
    if last_sent:
        try:
            last_sent_dt = datetime.datetime.fromisoformat(last_sent).astimezone(datetime.timezone.utc)
        except Exception:
            last_sent_dt = None

    now_utc = datetime.datetime.now(datetime.timezone.utc)
    too_soon = False
    if last_sent_dt is not None:
        delta_min = (now_utc - last_sent_dt).total_seconds() / 60.0
        too_soon = delta_min < MIN_RESEND_MINUTES

    # In debug mode, bypass the anti-spam checks
    if debug_mode:
        LOGGER.info("[DEBUG MODE] Bypass anti-spam: invio forzato")
    elif too_soon:
        LOGGER.info("Allerta già inviata di recente (troppo presto).")
        # Save the updated state even when nothing is sent
        state["active_events"] = active_events
        save_state(state)
        return

    model_info = MODEL_AROME
    if comparisons:
        model_info = f"{MODEL_AROME} + ICON Italia (discordanza rilevata)"

    msg = (
        f"⚠️ *ALLERTA METEO (entro {WINDOW_MINUTES} minuti)*\n"
        f"📍 {LOCATION_NAME}\n"
        f"🕒 Agg. `{now.strftime('%d/%m %H:%M')}` (modello: `{model_info}`)\n\n"
        + "\n\n".join(alerts)
        + "\n\n_Fonte: Open-Meteo_"
    )

    ok = telegram_send_markdown(msg, chat_ids=chat_ids)
    if ok:
        LOGGER.info("Notifica inviata.")
        # Save the state with the updated active events
        state["active_events"] = active_events
        state["last_sent_utc"] = now_utc.isoformat(timespec="seconds")
        save_state(state)
    else:
        LOGGER.error("Notifica NON inviata (token/telegram).")
        # Save the updated state anyway
        state["active_events"] = active_events
        save_state(state)

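# Typical invocations (illustrative; the filename and the cron schedule are
# assumptions, not part of this script):
#   python3 nowcast_120m_alert.py            # normal run, notifies all chat IDs
#   python3 nowcast_120m_alert.py --debug    # admin-only, bypasses anti-spam
#   DEBUG=1 python3 nowcast_120m_alert.py    # verbose logging to file and stderr
#   # e.g. from cron: */15 * * * * /usr/bin/python3 /path/to/nowcast_120m_alert.py
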
if __name__ == "__main__":
    arg_parser = argparse.ArgumentParser(description="Nowcast 120m alert")
    arg_parser.add_argument("--debug", action="store_true", help="Invia messaggi solo all'admin (chat ID: %s)" % TELEGRAM_CHAT_IDS[0])
    args = arg_parser.parse_args()

    # In debug mode, send only to the first chat ID (admin) and bypass the anti-spam check
    chat_ids = [TELEGRAM_CHAT_IDS[0]] if args.debug else None

    main(chat_ids=chat_ids, debug_mode=args.debug)