#!/usr/bin/env python3
# -*- coding: utf-8 -*-

import argparse
import datetime
import html
import json
import logging
import os
import time
from logging.handlers import RotatingFileHandler
from typing import Dict, List, Optional, Tuple
from zoneinfo import ZoneInfo

import requests
from dateutil import parser

# =============================================================================
# SEVERE WEATHER ALERT (next 48h) - Casa (LAT/LON)
# - Wind gusts persistence: >= threshold for at least 2 consecutive hours
# - Rain persistence: threshold (mm/3h) exceeded for at least 2 hours (2 consecutive 3h windows)
# - Convective storms (severe thunderstorms): combined ICON Italia + AROME Seamless analysis
#   * Lightning (CAPE > 800 J/kg + LPI > 0)
#   * Downburst/violent thunderstorms (CAPE > 1500 J/kg + Wind Gusts > 60 km/h)
#   * Cloudbursts (Precipitation > 20 mm/h or 3h sum > 40 mm)
#
# Telegram token: NOT in clear.
# Read order:
# 1) env TELEGRAM_BOT_TOKEN
# 2) ~/.telegram_dpc_bot_token
# 3) /etc/telegram_dpc_bot_token
#
# Debug:
# DEBUG=1 python3 severe_weather.py
#
# Log:
# ./weather_alert.log (same folder as this script)
# =============================================================================

DEBUG = os.environ.get("DEBUG", "0").strip() == "1"

# ----------------- TELEGRAM -----------------
TELEGRAM_CHAT_IDS = ["64463169", "24827341", "132455422", "5405962012"]
TOKEN_FILE_HOME = os.path.expanduser("~/.telegram_dpc_bot_token")
TOKEN_FILE_ETC = "/etc/telegram_dpc_bot_token"

# ----------------- LOCATION -----------------
DEFAULT_LAT = 43.9356
DEFAULT_LON = 12.4296

# ----------------- THRESHOLDS -----------------
# Wind (km/h) - thresholds as per your configured set
WIND_YELLOW = 62.0
WIND_ORANGE = 75.0
WIND_RED = 88.0

# Rain: mm in 3 hours
RAIN_3H_LIMIT = 25.0

# Minimum required persistence (hours)
PERSIST_HOURS = 2  # user request: >= 2 hours

# ----------------- HORIZON -----------------
HOURS_AHEAD = 48  # extended to 48h for the severe thunderstorm analysis

# ----------------- CONVECTIVE STORM THRESHOLDS -----------------
CAPE_LIGHTNING_THRESHOLD = 800.0    # J/kg - threshold for lightning risk
CAPE_SEVERE_THRESHOLD = 1500.0      # J/kg - threshold for violent thunderstorms
WIND_GUST_DOWNBURST_THRESHOLD = 60.0  # km/h - wind threshold for downbursts
RAIN_INTENSE_THRESHOLD_H = 20.0     # mm/h - hourly cloudburst threshold
RAIN_INTENSE_THRESHOLD_3H = 40.0    # mm/3h - 3-hour cloudburst threshold
STORM_SCORE_THRESHOLD = 40.0        # minimum Storm Severity Score for an alert

# ----------------- FILES -----------------
STATE_FILE = "/home/daniely/docker/telegram-bot/weather_state.json"
BASE_DIR = os.path.dirname(os.path.abspath(__file__))
LOG_FILE = os.path.join(BASE_DIR, "weather_alert.log")

# ----------------- OPEN-METEO -----------------
OPEN_METEO_URL = "https://api.open-meteo.com/v1/forecast"
TZ = "Europe/Berlin"
TZINFO = ZoneInfo(TZ)
HTTP_HEADERS = {"User-Agent": "rpi-severe-weather/2.0"}

# Forced model: AROME Seamless (provides rain/snowfall/weathercode)
MODEL_PRIMARY = "meteofrance_seamless"
# Fallback (same Meteo-France family) for operational continuity
MODEL_FALLBACK = "meteofrance_arome_france_hd"
# Model used for comparison
MODEL_ICON_IT = "italia_meteo_arpae_icon_2i"
COMPARISON_THRESHOLD = 0.30  # 30% deviation threshold for the model comparison

# If True, send an "all clear" message when everything drops back below threshold (not an error)
SEND_ALL_CLEAR = True


# =============================================================================
# LOGGING
# =============================================================================
def setup_logger() -> logging.Logger:
    logger = logging.getLogger("severe_weather")
    logger.setLevel(logging.DEBUG if DEBUG else logging.INFO)
    logger.handlers.clear()

    fh = RotatingFileHandler(LOG_FILE, maxBytes=1_000_000, backupCount=5, encoding="utf-8")
    fh.setLevel(logging.DEBUG)
    fmt = logging.Formatter("%(asctime)s %(levelname)s %(message)s")
    fh.setFormatter(fmt)
    logger.addHandler(fh)

    if DEBUG:
        sh = logging.StreamHandler()
        sh.setLevel(logging.DEBUG)
        sh.setFormatter(fmt)
        logger.addHandler(sh)

    return logger


LOGGER = setup_logger()


# =============================================================================
# UTILS
# =============================================================================
def ensure_parent_dir(path: str) -> None:
    parent = os.path.dirname(path)
    if parent and not os.path.exists(parent):
        os.makedirs(parent, exist_ok=True)


def now_local() -> datetime.datetime:
    return datetime.datetime.now(TZINFO)


def read_text_file(path: str) -> str:
    try:
        with open(path, "r", encoding="utf-8") as f:
            return f.read().strip()
    except FileNotFoundError:
        return ""
    except PermissionError:
        LOGGER.debug("Permission denied reading %s", path)
        return ""
    except Exception as e:
        LOGGER.exception("Error reading %s: %s", path, e)
        return ""


def load_bot_token() -> str:
    tok = os.environ.get("TELEGRAM_BOT_TOKEN", "").strip()
    if tok:
        return tok
    tok = read_text_file(TOKEN_FILE_HOME)
    if tok:
        return tok
    tok = read_text_file(TOKEN_FILE_ETC)
    return tok.strip() if tok else ""


def parse_time_to_local(t: str) -> datetime.datetime:
    """
    Robust timezone handling:
    - If timestamps are naive (common when the API is queried with a local timezone), interpret them in the configured TZ.
    - If timestamps include an offset, convert them to the configured TZ.
    """
    dt = parser.isoparse(t)
    if dt.tzinfo is None:
        return dt.replace(tzinfo=TZINFO)
    return dt.astimezone(TZINFO)


def hhmm(dt: datetime.datetime) -> str:
    return dt.strftime("%H:%M")


def ddmmyy_hhmm(dt: datetime.datetime) -> str:
    """Format a datetime as 'dd/mm HH:MM' to include both date and time."""
    return dt.strftime("%d/%m %H:%M")


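# Illustrative behaviour of parse_time_to_local (example values, not from the original;
# assuming the default TZ = "Europe/Berlin", i.e. CEST in summer):
#   parse_time_to_local("2024-06-01T12:00")        -> 2024-06-01 12:00+02:00 (naive, taken as local)
#   parse_time_to_local("2024-06-01T10:00+00:00")  -> 2024-06-01 12:00+02:00 (offset converted)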
# =============================================================================
# TELEGRAM
# =============================================================================
def telegram_send_html(message_html: str, chat_ids: Optional[List[str]] = None) -> bool:
    """
    Never raises. Returns True if at least one chat_id succeeded.
    IMPORTANT: called only on REAL ALERTS (not on errors).

    Args:
        message_html: HTML message to send
        chat_ids: list of chat IDs (default: TELEGRAM_CHAT_IDS)
    """
    token = load_bot_token()
    if not token:
        LOGGER.warning("Telegram token missing: message not sent.")
        return False

    if chat_ids is None:
        chat_ids = TELEGRAM_CHAT_IDS

    url = f"https://api.telegram.org/bot{token}/sendMessage"
    base_payload = {
        "text": message_html,
        "parse_mode": "HTML",
        "disable_web_page_preview": True,
    }

    sent_ok = False
    with requests.Session() as s:
        for chat_id in chat_ids:
            payload = dict(base_payload)
            payload["chat_id"] = chat_id
            try:
                resp = s.post(url, json=payload, timeout=15)
                if resp.status_code == 200:
                    sent_ok = True
                else:
                    LOGGER.error("Telegram error chat_id=%s status=%s body=%s",
                                 chat_id, resp.status_code, resp.text[:500])
                time.sleep(0.25)
            except Exception as e:
                LOGGER.exception("Telegram exception chat_id=%s err=%s", chat_id, e)

    return sent_ok


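# Illustrative usage (hypothetical chat id, not from the original):
#   telegram_send_html("⚠️ <b>test</b>", chat_ids=["123456789"])
# The call returns True as soon as at least one chat accepts the message; failures
# are only logged, never re-raised, so the caller cannot crash on Telegram errors.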
# =============================================================================
# STATE
# =============================================================================
def load_state() -> Dict:
    default = {
        "alert_active": False,
        "wind_level": 0,
        "last_wind_peak": 0.0,
        "last_rain_3h": 0.0,
        "convective_storm_active": False,
        "last_storm_score": 0.0,
        "last_alert_type": None,   # alert type: "VENTO", "PIOGGIA", "TEMPORALI", or a combined list
        "last_alert_time": None,   # ISO timestamp of the last notification
    }
    if os.path.exists(STATE_FILE):
        try:
            with open(STATE_FILE, "r", encoding="utf-8") as f:
                data = json.load(f) or {}
            default.update(data)
        except Exception as e:
            LOGGER.exception("State read error: %s", e)
    return default


def save_state(state: Dict) -> None:
    try:
        ensure_parent_dir(STATE_FILE)
        with open(STATE_FILE, "w", encoding="utf-8") as f:
            json.dump(state, f, ensure_ascii=False, indent=2)
    except Exception as e:
        LOGGER.exception("State write error: %s", e)


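# Illustrative content of weather_state.json after a wind alert (example values only,
# not from the original):
#   {
#     "alert_active": true,
#     "wind_level": 2,
#     "last_wind_peak": 82.0,
#     "last_rain_3h": 0.0,
#     "convective_storm_active": false,
#     "last_storm_score": 0.0,
#     "last_alert_type": ["VENTO MOLTO FORTE"],
#     "last_alert_time": "2024-06-01T12:00:00+02:00"
#   }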
# =============================================================================
# OPEN-METEO
# =============================================================================
def fetch_forecast(models_value: str, lat: Optional[float] = None, lon: Optional[float] = None, timezone: Optional[str] = None) -> Optional[Dict]:
    if lat is None:
        lat = DEFAULT_LAT
    if lon is None:
        lon = DEFAULT_LON

    # Use the custom timezone if provided, otherwise the default
    tz_to_use = timezone if timezone else TZ

    # Base parameters for all models
    hourly_params = "precipitation,wind_gusts_10m,weather_code"

    # Model-specific parameters
    if models_value == MODEL_PRIMARY or models_value == MODEL_FALLBACK:
        # AROME: add CAPE (Convective Available Potential Energy) and other convective parameters
        hourly_params += ",cape,convective_inhibition"
    elif models_value == MODEL_ICON_IT:
        # ICON Italia: try to request LPI (Lightning Potential Index) if available.
        # Note: the exact parameter name may vary; if LPI is missing, CAPE is used as a proxy.
        hourly_params += ",cape"  # ICON may expose CAPE, use it as a fallback

    params = {
        "latitude": lat,
        "longitude": lon,
        "hourly": hourly_params,
        "timezone": tz_to_use,
        "forecast_days": 2,
        "wind_speed_unit": "kmh",
        "precipitation_unit": "mm",
        "models": models_value,
    }

    # Add minutely_15 for AROME Seamless (15-minute detail for a precise event onset).
    # If the request fails or the series has holes, retry without minutely_15.
    use_minutely = False
    if models_value == MODEL_PRIMARY:
        params["minutely_15"] = "precipitation,rain,snowfall,wind_speed_10m,wind_direction_10m,weather_code,temperature_2m"
        use_minutely = True

    try:
        r = requests.get(OPEN_METEO_URL, params=params, headers=HTTP_HEADERS, timeout=25)
        if r.status_code == 400:
            # On 400 with minutely_15, retry without it
            if use_minutely and "minutely_15" in params:
                LOGGER.warning("Open-Meteo 400 with minutely_15 (models=%s), retrying without minutely_15", models_value)
                params_no_minutely = params.copy()
                del params_no_minutely["minutely_15"]
                try:
                    r2 = requests.get(OPEN_METEO_URL, params=params_no_minutely, headers=HTTP_HEADERS, timeout=25)
                    if r2.status_code == 200:
                        return r2.json()
                except Exception:
                    pass
            # Log reason if present; no Telegram on errors
            try:
                j = r.json()
                LOGGER.error("Open-Meteo 400 (models=%s): %s", models_value, j.get("reason", j))
            except Exception:
                LOGGER.error("Open-Meteo 400 (models=%s): %s", models_value, r.text[:500])
            return None
        elif r.status_code == 504:
            # Gateway Timeout: if minutely_15 was requested, retry without it
            if use_minutely and "minutely_15" in params:
                LOGGER.warning("Open-Meteo 504 Gateway Timeout with minutely_15 (models=%s), retrying without minutely_15", models_value)
                params_no_minutely = params.copy()
                del params_no_minutely["minutely_15"]
                try:
                    r2 = requests.get(OPEN_METEO_URL, params=params_no_minutely, headers=HTTP_HEADERS, timeout=25)
                    if r2.status_code == 200:
                        return r2.json()
                except Exception:
                    pass
            LOGGER.error("Open-Meteo 504 Gateway Timeout (models=%s)", models_value)
            return None
        r.raise_for_status()
        data = r.json()

        # Check whether minutely_15 has holes (even a single None triggers the hourly fallback)
        if use_minutely and "minutely_15" in params:
            minutely = data.get("minutely_15", {}) or {}
            minutely_times = minutely.get("time", []) or []
            minutely_precip = minutely.get("precipitation", []) or []
            minutely_rain = minutely.get("rain", []) or []

            # Look for holes (even a single None)
            if minutely_times:
                has_holes = False
                # Check precipitation
                if minutely_precip and any(v is None for v in minutely_precip):
                    has_holes = True
                # Check rain
                if minutely_rain and any(v is None for v in minutely_rain):
                    has_holes = True

                if has_holes:
                    LOGGER.warning("minutely_15 has holes (None values detected, models=%s), retrying without minutely_15", models_value)
                    params_no_minutely = params.copy()
                    del params_no_minutely["minutely_15"]
                    try:
                        r2 = requests.get(OPEN_METEO_URL, params=params_no_minutely, headers=HTTP_HEADERS, timeout=25)
                        if r2.status_code == 200:
                            return r2.json()
                    except Exception:
                        pass

        return data
    except requests.exceptions.Timeout:
        # Timeout: if minutely_15 was requested, retry without it
        if use_minutely and "minutely_15" in params:
            LOGGER.warning("Open-Meteo timeout with minutely_15 (models=%s), retrying without minutely_15", models_value)
            params_no_minutely = params.copy()
            del params_no_minutely["minutely_15"]
            try:
                r2 = requests.get(OPEN_METEO_URL, params=params_no_minutely, headers=HTTP_HEADERS, timeout=25)
                if r2.status_code == 200:
                    return r2.json()
            except Exception:
                pass
        LOGGER.exception("Open-Meteo request timeout (models=%s)", models_value)
        return None
    except Exception as e:
        LOGGER.exception("Open-Meteo request error (models=%s): %s", models_value, e)
        return None


def get_forecast(lat: Optional[float] = None, lon: Optional[float] = None, timezone: Optional[str] = None) -> Tuple[Optional[Dict], str]:
    LOGGER.debug("Requesting Open-Meteo with models=%s", MODEL_PRIMARY)
    data = fetch_forecast(MODEL_PRIMARY, lat=lat, lon=lon, timezone=timezone)
    if data is not None:
        return data, MODEL_PRIMARY

    LOGGER.warning("Primary model failed (%s). Trying fallback=%s", MODEL_PRIMARY, MODEL_FALLBACK)
    data2 = fetch_forecast(MODEL_FALLBACK, lat=lat, lon=lon, timezone=timezone)
    if data2 is not None:
        return data2, MODEL_FALLBACK

    return None, MODEL_PRIMARY


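# Illustrative effective request for the primary model (assumed default location;
# derived from the parameter dict above, not copied from the original):
#   https://api.open-meteo.com/v1/forecast with
#     latitude=43.9356, longitude=12.4296,
#     hourly=precipitation,wind_gusts_10m,weather_code,cape,convective_inhibition,
#     minutely_15=precipitation,rain,snowfall,wind_speed_10m,wind_direction_10m,weather_code,temperature_2m,
#     timezone=Europe/Berlin, forecast_days=2, wind_speed_unit=kmh,
#     precipitation_unit=mm, models=meteofrance_seamless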
def compare_values(arome_val: float, icon_val: float) -> Optional[Dict]:
    """Compare two values and return details if the deviation exceeds COMPARISON_THRESHOLD (30%)."""
    if arome_val == 0 and icon_val == 0:
        return None

    if arome_val > 0:
        diff_pct = abs(icon_val - arome_val) / arome_val
    elif icon_val > 0:
        diff_pct = abs(arome_val - icon_val) / icon_val
    else:
        return None

    if diff_pct > COMPARISON_THRESHOLD:
        return {
            "diff_pct": diff_pct * 100,
            "arome": arome_val,
            "icon": icon_val
        }
    return None


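# Worked example (illustrative values, not from the original):
#   compare_values(80.0, 50.0) -> |50 - 80| / 80 = 0.375 -> 37.5% > 30%
#                              -> {"diff_pct": 37.5, "arome": 80.0, "icon": 50.0}
#   compare_values(80.0, 70.0) -> 12.5% deviation -> None (models considered in agreement)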
# =============================================================================
# RAINFALL EVENT ANALYSIS (48h extended)
# =============================================================================
def analyze_rainfall_event(
    times: List[str],
    precipitation: List[float],
    weathercode: List[int],
    start_idx: int,
    max_hours: int = 48,
    threshold_mm_h: Optional[float] = None
) -> Optional[Dict]:
    """
    Analyse a complete intense-rain event starting from start_idx.

    Computes:
    - Total duration (consecutive hours with significant rain)
    - Total accumulation (sum of all precipitation values > 0 or above threshold)
    - Start and end times
    - Maximum hourly intensity

    Args:
        times: list of timestamps
        precipitation: list of precipitation values (mm)
        weathercode: list of weather codes
        start_idx: index where the event starts
        max_hours: maximum number of hours to analyse (default: 48)
        threshold_mm_h: minimum threshold for significant rain (mm/h). If None, any rain > 0 counts

    Returns:
        Dict with:
        - duration_hours: duration in hours
        - total_accumulation_mm: total accumulation in mm
        - max_intensity_mm_h: maximum hourly intensity
        - start_time: start datetime
        - end_time: end datetime (or None if the event continues beyond max_hours)
        - is_ongoing: True if the event continues beyond max_hours
    """
    # Weather codes that indicate rain (WMO)
    RAIN_WEATHER_CODES = [61, 63, 65, 66, 67, 80, 81, 82]  # light, moderate, heavy, freezing rain, showers

    if start_idx >= len(times):
        return None

    start_dt = parse_time_to_local(times[start_idx])
    end_idx = start_idx
    total_accum = 0.0
    duration = 0
    max_intensity = 0.0

    # Analyse up to max_hours ahead, or until the data ends
    max_idx = min(start_idx + max_hours, len(times))

    for i in range(start_idx, max_idx):
        precip_val = precipitation[i] if i < len(precipitation) and precipitation[i] is not None else 0.0
        code = weathercode[i] if i < len(weathercode) and weathercode[i] is not None else None

        # Rain is considered significant if:
        # - precipitation >= threshold (when specified), OR
        # - precipitation > 0 or the weather_code indicates rain
        is_rain = False
        if threshold_mm_h is not None:
            is_rain = precip_val >= threshold_mm_h
        else:
            is_rain = (precip_val > 0.0) or (code in RAIN_WEATHER_CODES)

        if is_rain:
            duration += 1
            total_accum += precip_val
            max_intensity = max(max_intensity, precip_val)
            end_idx = i
        else:
            # A short pause does not end the event (it may be temporary),
            # but if the pause lasts 2 hours or more the event is considered over.
            pause_hours = 0
            for j in range(i, min(i + 3, max_idx)):
                next_precip = precipitation[j] if j < len(precipitation) and precipitation[j] is not None else 0.0
                next_code = weathercode[j] if j < len(weathercode) and weathercode[j] is not None else None
                next_is_rain = False
                if threshold_mm_h is not None:
                    next_is_rain = next_precip >= threshold_mm_h
                else:
                    next_is_rain = (next_precip > 0.0) or (next_code in RAIN_WEATHER_CODES)

                if next_is_rain:
                    break
                pause_hours += 1

            # Pause of 2 hours or more: stop the analysis
            if pause_hours >= 2:
                break

    end_dt = parse_time_to_local(times[end_idx]) if end_idx < len(times) else None
    is_ongoing = (end_idx >= max_idx - 1) and (end_idx < len(times) - 1)

    return {
        "duration_hours": duration,
        "total_accumulation_mm": total_accum,
        "max_intensity_mm_h": max_intensity,
        "start_time": start_dt,
        "end_time": end_dt,
        "is_ongoing": is_ongoing,
        "start_idx": start_idx,
        "end_idx": end_idx
    }


def find_rainfall_start(
    times: List[str],
    precipitation: List[float],
    weathercode: List[int],
    window_start: datetime.datetime,
    window_end: datetime.datetime,
    threshold_mm_h: Optional[float] = None
) -> Optional[int]:
    """
    Find the start of an intense-rain event within the time window.

    Returns:
        Index of the first timestamp with significant rain, or None
    """
    RAIN_WEATHER_CODES = [61, 63, 65, 66, 67, 80, 81, 82]

    for i, t_str in enumerate(times):
        try:
            t_dt = parse_time_to_local(t_str)
            if t_dt < window_start or t_dt > window_end:
                continue

            precip_val = precipitation[i] if i < len(precipitation) and precipitation[i] is not None else 0.0
            code = weathercode[i] if i < len(weathercode) and weathercode[i] is not None else None

            is_rain = False
            if threshold_mm_h is not None:
                is_rain = precip_val >= threshold_mm_h
            else:
                is_rain = (precip_val > 0.0) or (code in RAIN_WEATHER_CODES)

            if is_rain:
                return i
        except Exception:
            continue

    return None


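# Worked example for analyze_rainfall_event (illustrative values, not from the original),
# threshold_mm_h=None:
#   precipitation = [5, 12, 0, 9, 0, 0, 0] starting at start_idx=0
#   hours 0-1 rain (5 + 12 mm); hour 2 is dry but the look-ahead finds rain at hour 3
#   (pause of 1 h < 2 h, so the event continues); hour 3 adds 9 mm; the three dry hours
#   that follow end the event -> duration_hours=3, total_accumulation_mm=26.0,
#   max_intensity_mm_h=12.0, end_idx=3.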
# =============================================================================
# CONVECTIVE STORM EVENT ANALYSIS (48h extended)
# =============================================================================
def analyze_convective_storm_event(
    storm_events: List[Dict],
    times: List[str],
    start_idx: int,
    max_hours: int = 48
) -> Optional[Dict]:
    """
    Analyse a complete convective storm event over 48 hours.

    Computes:
    - Total duration of the convective event
    - Maximum and average score
    - Total associated precipitation accumulation
    - Maximum intensity (hourly precipitation)

    Args:
        storm_events: list of convective events (from analyze_convective_risk)
        times: list of timestamps
        start_idx: index where the event starts
        max_hours: maximum number of hours to analyse (default: 48)

    Returns:
        Dict with:
        - duration_hours: number of hours with convective risk in the window
        - max_score: maximum score
        - avg_score: average score
        - total_precipitation_mm: total precipitation accumulation
        - max_precipitation_mm_h: maximum hourly intensity
        - start_time: start datetime
        - end_time: end datetime
        - is_ongoing: True if the event continues beyond max_hours
    """
    if not storm_events:
        return None

    if start_idx >= len(times):
        return None

    # Filter events inside the time window
    start_dt = parse_time_to_local(times[start_idx])
    max_idx = min(start_idx + max_hours, len(times))
    end_dt = parse_time_to_local(times[max_idx - 1]) if max_idx <= len(times) else None

    # Events inside the window
    window_events = []
    for event in storm_events:
        event_dt = parse_time_to_local(event["timestamp"])
        if start_dt <= event_dt <= (end_dt or event_dt):
            window_events.append(event)

    if not window_events:
        return None

    # Compute statistics
    scores = [e["score"] for e in window_events]
    precipitations = [e["precip"] for e in window_events]

    duration = len(window_events)
    max_score = max(scores) if scores else 0.0
    avg_score = sum(scores) / len(scores) if scores else 0.0
    total_precip = sum(precipitations) if precipitations else 0.0
    max_precip = max(precipitations) if precipitations else 0.0

    first_event = window_events[0]
    last_event = window_events[-1]
    event_start = parse_time_to_local(first_event["timestamp"])
    event_end = parse_time_to_local(last_event["timestamp"])

    is_ongoing = (max_idx >= len(times) - 1)

    return {
        "duration_hours": duration,
        "max_score": max_score,
        "avg_score": avg_score,
        "total_precipitation_mm": total_precip,
        "max_precipitation_mm_h": max_precip,
        "start_time": event_start,
        "end_time": event_end,
        "is_ongoing": is_ongoing,
        "event_count": len(window_events)
    }


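# Worked example (illustrative values, not from the original): two storm events inside
# the window with scores 55 and 45 and hourly precipitation 12 mm and 8 mm give
#   duration_hours=2, max_score=55, avg_score=50.0,
#   total_precipitation_mm=20.0, max_precipitation_mm_h=12.0, event_count=2.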
# =============================================================================
# CONVECTIVE STORM ANALYSIS (Nowcasting)
# =============================================================================
def analyze_convective_risk(icon_data: Dict, arome_data: Dict, times_base: List[str], start_idx: int, end_idx: int) -> List[Dict]:
    """
    Analyse severe thunderstorm potential by combining ICON Italia and AROME Seamless data.

    Args:
        icon_data: ICON Italia data with LPI (Lightning Potential Index)
        arome_data: AROME Seamless data with CAPE, wind gusts, precipitation
        times_base: reference timestamp list (from AROME)
        start_idx: start index of the analysis window
        end_idx: end index of the analysis window

    Returns:
        List of dicts with risk details for every hour that exceeds the threshold.
        Each dict contains: timestamp, score, threats (list), cape, lpi, gusts, precip
    """
    if not icon_data or not arome_data:
        return []

    icon_hourly = icon_data.get("hourly", {}) or {}
    arome_hourly = arome_data.get("hourly", {}) or {}

    # Extract data
    icon_times = icon_hourly.get("time", []) or []

    # Try several variants for LPI (the parameter name may vary)
    icon_lpi = (icon_hourly.get("lightning_potential_index", []) or
                icon_hourly.get("lightning_potential", []) or
                icon_hourly.get("lpi", []) or
                [])

    # If LPI is unavailable, use ICON CAPE as a proxy (high CAPE can indicate convective activity)
    icon_cape = icon_hourly.get("cape", []) or []
    # With CAPE from ICON but no LPI, treat CAPE > 800 as an indicator of possible electrical activity
    if not icon_lpi and icon_cape:
        # Convert CAPE into an LPI proxy: CAPE > 800 = LPI > 0
        icon_lpi = [1.0 if (cape is not None and float(cape) > 800) else 0.0 for cape in icon_cape]

    arome_cape = arome_hourly.get("cape", []) or []
    arome_gusts = arome_hourly.get("wind_gusts_10m", []) or []
    arome_precip = arome_hourly.get("precipitation", []) or []

    # Alignment: synchronise timestamps (ICON and AROME may have different resolutions).
    # For simplicity, assume they share the same hourly resolution and align by index.
    results = []

    # Pre-compute 3-hour precipitation sums for AROME
    arome_precip_3h = []
    for i in range(len(arome_precip)):
        if i < 2:
            arome_precip_3h.append(0.0)
        else:
            try:
                sum_3h = sum(float(arome_precip[j]) for j in range(i-2, i+1) if arome_precip[j] is not None)
                arome_precip_3h.append(sum_3h)
            except Exception:
                arome_precip_3h.append(0.0)

    # Analyse every hour in the window
    for i in range(start_idx, min(end_idx, len(times_base), len(arome_cape), len(arome_gusts), len(arome_precip))):
        if i >= len(times_base):
            break

        # Extract the values for this hour
        try:
            cape_val = float(arome_cape[i]) if i < len(arome_cape) and arome_cape[i] is not None else 0.0
            gusts_val = float(arome_gusts[i]) if i < len(arome_gusts) and arome_gusts[i] is not None else 0.0
            precip_val = float(arome_precip[i]) if i < len(arome_precip) and arome_precip[i] is not None else 0.0
            precip_3h_val = arome_precip_3h[i] if i < len(arome_precip_3h) else 0.0
        except (ValueError, TypeError, IndexError):
            continue

        # Extract LPI from ICON (aligned by index, assuming the same timestamp)
        lpi_val = 0.0
        if i < len(icon_times) and i < len(icon_lpi):
            # Check that the timestamps roughly match
            try:
                icon_time = parse_time_to_local(icon_times[i])
                arome_time = parse_time_to_local(times_base[i])
                # If the timestamps are within 30 minutes, consider them aligned
                time_diff = abs((icon_time - arome_time).total_seconds() / 60)
                if time_diff < 30:
                    lpi_val = float(icon_lpi[i]) if icon_lpi[i] is not None else 0.0
            except (ValueError, TypeError, IndexError):
                pass

        # Compute the Storm Severity Score (0-100)
        score = 0.0
        threats = []

        # 1. Energy component (CAPE): 0-40 points
        if cape_val > 0:
            cape_score = min(40.0, (cape_val / 2000.0) * 40.0)  # 2000 J/kg = 40 points
            score += cape_score

        # 2. Lightning component (LPI): 0-30 points
        if lpi_val > 0:
            lpi_score = min(30.0, lpi_val * 10.0)  # LPI normalised (assuming a 0-3 scale)
            score += lpi_score

        # 3. Dynamic component (wind gusts + precipitation): 0-30 points
        if gusts_val > WIND_GUST_DOWNBURST_THRESHOLD and precip_val > 0.1:
            dynamic_score = min(30.0, ((gusts_val - WIND_GUST_DOWNBURST_THRESHOLD) / 40.0) * 30.0)
            score += dynamic_score

        # Identify specific threats
        # Lightning
        if cape_val > CAPE_LIGHTNING_THRESHOLD and lpi_val > 0:
            threats.append("Fulminazioni")

        # Downburst/violent thunderstorm
        if cape_val > CAPE_SEVERE_THRESHOLD and gusts_val > WIND_GUST_DOWNBURST_THRESHOLD:
            threats.append("Downburst/Temporale violento")

        # Cloudburst
        if precip_val > RAIN_INTENSE_THRESHOLD_H or precip_3h_val > RAIN_INTENSE_THRESHOLD_3H:
            threats.append("Nubifragio")

        # Add the result only if it exceeds the threshold
        if score >= STORM_SCORE_THRESHOLD or threats:
            results.append({
                "timestamp": times_base[i],
                "score": score,
                "threats": threats,
                "cape": cape_val,
                "lpi": lpi_val,
                "gusts": gusts_val,
                "precip": precip_val,
                "precip_3h": precip_3h_val,
            })

    return results


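# Worked example of the Storm Severity Score (illustrative values, not from the original):
#   cape=1600 J/kg -> min(40, 1600/2000*40) = 32 points
#   lpi=1.5        -> min(30, 1.5*10)       = 15 points
#   gusts=70 km/h with precip=2 mm/h -> min(30, (70-60)/40*30) = 7.5 points
#   total score = 54.5 >= 40 -> the hour is reported; threats: "Fulminazioni"
#   (CAPE > 800 and LPI > 0) and "Downburst/Temporale violento" (CAPE > 1500 and gusts > 60),
#   while "Nubifragio" is not flagged (2 mm/h < 20 mm/h and precip_3h below 40 mm).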
def format_convective_alert(storm_events: List[Dict], times: List[str], start_idx: int) -> str:
    """Format the severe-thunderstorm alert message with full details."""
    if not storm_events:
        return ""

    # Extended analysis over 48 hours
    storm_analysis = analyze_convective_storm_event(storm_events, times, start_idx, max_hours=48)

    # Compute aggregate statistics
    max_score = max(e["score"] for e in storm_events)
    max_cape_overall = max(e["cape"] for e in storm_events)
    max_lpi_overall = max((e["lpi"] for e in storm_events if e["lpi"] > 0), default=0.0)
    max_gusts_overall = max(e["gusts"] for e in storm_events)
    max_precip_h_overall = max(e["precip"] for e in storm_events)
    max_precip_3h_overall = max(e["precip_3h"] for e in storm_events)

    # Determine the overall period
    first_event = storm_events[0]
    last_event = storm_events[-1]
    first_time = parse_time_to_local(first_event["timestamp"])
    last_time = parse_time_to_local(last_event["timestamp"])

    # Use the duration from the extended analysis if available, otherwise count events
    if storm_analysis:
        duration_hours = storm_analysis["duration_hours"]
        total_precip = storm_analysis["total_precipitation_mm"]
        max_precip_h = storm_analysis["max_precipitation_mm_h"]
    else:
        duration_hours = len(storm_events)
        total_precip = sum(e["precip"] for e in storm_events)
        max_precip_h = max_precip_h_overall

    # Group by threat type
    by_threat = {}
    for event in storm_events:
        for threat in event.get("threats", []):
            if threat not in by_threat:
                by_threat[threat] = []
            by_threat[threat].append(event)

    # Main header with score and period
    msg_parts = [
        "⛈️ <b>ALLERTA TEMPORALI SEVERI</b>",
        f"📊 <b>Storm Severity Score max:</b> <b>{max_score:.0f}/100</b>",
        f"🕒 <b>Periodo rischio:</b> <b>{first_time.strftime('%d/%m %H:%M')}</b> - <b>{last_time.strftime('%d/%m %H:%M')}</b>",
        f"⏱️ <b>Durata stimata:</b> ~{duration_hours} ore",
    ]

    # Details per threat type
    for threat_type, events in sorted(by_threat.items(), key=lambda x: len(x[1]), reverse=True):
        if threat_type == "Fulminazioni":
            msg_parts.append("\n⚡ <b>RISCHIO FULMINAZIONI</b>")

            # Lightning timeline
            first = events[0]
            last = events[-1]
            first_time_threat = hhmm(parse_time_to_local(first["timestamp"]))
            last_time_threat = hhmm(parse_time_to_local(last["timestamp"]))

            # Values specific to this threat
            max_cape = max(e["cape"] for e in events)
            min_cape = min((e["cape"] for e in events if e["cape"] > 0), default=0)
            avg_cape = sum(e["cape"] for e in events) / len(events) if events else 0
            max_lpi = max((e["lpi"] for e in events if e["lpi"] > 0), default=0.0)
            hours_with_lpi = sum(1 for e in events if e["lpi"] > 0)

            msg_parts.append(
                f"🕒 <b>Periodo:</b> {first_time_threat} - {last_time_threat} ({len(events)} ore)\n"
                f"⚡ <b>CAPE:</b> max <b>{max_cape:.0f}</b> J/kg | min {min_cape:.0f} J/kg | media {avg_cape:.0f} J/kg\n"
                f"💥 <b>LPI:</b> max <b>{max_lpi:.2f}</b> | ore con attività: {hours_with_lpi}/{len(events)}\n"
                f"⚠️ <i>Alta probabilità di fulminazioni. Evitare attività all'aperto.</i>"
            )

        elif threat_type == "Downburst/Temporale violento":
            msg_parts.append("\n🌪️ <b>RISCHIO TEMPORALE VIOLENTO</b>")

            first = events[0]
            last = events[-1]
            first_time_threat = hhmm(parse_time_to_local(first["timestamp"]))
            last_time_threat = hhmm(parse_time_to_local(last["timestamp"]))

            max_cape = max(e["cape"] for e in events)
            min_cape = min((e["cape"] for e in events if e["cape"] > 0), default=0)
            max_gusts = max(e["gusts"] for e in events)
            min_gusts = min((e["gusts"] for e in events if e["gusts"] > WIND_GUST_DOWNBURST_THRESHOLD), default=0)
            avg_gusts = sum(e["gusts"] for e in events) / len(events) if events else 0

            # Determine the wind risk level
            if max_gusts > 90:
                wind_level = "🔴 ESTREMO"
            elif max_gusts > 75:
                wind_level = "🟠 ALTO"
            else:
                wind_level = "🟡 MODERATO"

            msg_parts.append(
                f"🕒 <b>Periodo:</b> {first_time_threat} - {last_time_threat} ({len(events)} ore)\n"
                f"⚡ <b>CAPE:</b> max <b>{max_cape:.0f}</b> J/kg | min {min_cape:.0f} J/kg\n"
                f"💨 <b>Raffiche vento:</b> max <b>{max_gusts:.0f}</b> km/h | min {min_gusts:.0f} km/h | media {avg_gusts:.0f} km/h\n"
                f"🌪️ <b>Livello rischio:</b> {wind_level}\n"
                f"⚠️ <i>Possibili downburst e venti distruttivi. Rimanere in luoghi sicuri.</i>"
            )

        elif threat_type == "Nubifragio":
            msg_parts.append("\n💧 <b>RISCHIO NUBIFRAGIO</b>")

            first = events[0]
            last = events[-1]
            first_time_threat = hhmm(parse_time_to_local(first["timestamp"]))
            last_time_threat = hhmm(parse_time_to_local(last["timestamp"]))

            max_precip_h = max(e["precip"] for e in events)
            max_precip_3h = max(e["precip_3h"] for e in events)
            avg_precip = sum(e["precip"] for e in events) / len(events) if events else 0
            # Use the total accumulation from the extended analysis if available
            if storm_analysis:
                total_precip_estimate = storm_analysis["total_precipitation_mm"]
            else:
                total_precip_estimate = sum(e["precip"] for e in events)

            # Determine the intensity
            if max_precip_h > 50:
                intensity = "🔴 ESTREMO (>50 mm/h)"
            elif max_precip_h > 30:
                intensity = "🟠 ALTO (30-50 mm/h)"
            elif max_precip_h > 20:
                intensity = "🟡 MODERATO (20-30 mm/h)"
            else:
                intensity = "🟢 BASSO"

            msg_parts.append(
                f"🕒 <b>Periodo:</b> {first_time_threat} - {last_time_threat} ({len(events)} ore)\n"
                f"🌧️ <b>Intensità:</b> max <b>{max_precip_h:.1f}</b> mm/h ({intensity})\n"
                f"💧 <b>Accumulo 3h:</b> max <b>{max_precip_3h:.1f}</b> mm\n"
                f"📊 <b>Media oraria:</b> {avg_precip:.1f} mm/h | <b>Accumulo totale (48h):</b> ~{total_precip_estimate:.1f} mm\n"
                f"⚠️ <i>Possibili allagamenti e frane. Evitare sottopassi e zone a rischio.</i>"
            )

    # Environmental conditions summary
    msg_parts.append("\n📈 <b>CONDIZIONI AMBIENTALI</b>")
    msg_parts.append(
        f"⚡ CAPE massimo: <b>{max_cape_overall:.0f}</b> J/kg\n"
        f"💥 LPI massimo: <b>{max_lpi_overall:.2f}</b>\n"
        f"💨 Raffiche massime: <b>{max_gusts_overall:.0f}</b> km/h\n"
        f"🌧️ Precipitazione max oraria: <b>{max_precip_h_overall:.1f}</b> mm/h"
    )

    return "\n".join(msg_parts)


# =============================================================================
# PERSISTENCE LOGIC
# =============================================================================
def best_wind_persistent_level(
    gusts: List[float],
    times: List[str],
    start_idx: int,
    end_idx: int,
    persist_hours: int
) -> Tuple[int, float, str, int]:
    """
    Returns:
        (level, peak_gust_within_level, first_start_hhmm, run_length_hours)
    Level 0 means not persistent above yellow.
    Persistence means >= threshold for >= persist_hours consecutive hourly points.
    """
    thresholds = [
        (3, WIND_RED),
        (2, WIND_ORANGE),
        (1, WIND_YELLOW),
    ]

    # Convert times to local datetimes once for speed/readability
    dt_list = [parse_time_to_local(t) for t in times]

    def find_run(threshold: float) -> Tuple[bool, float, str, int]:
        consec = 0
        run_start = None
        run_peak = 0.0
        best_start = ""
        best_peak = 0.0
        best_len = 0
        locked_run_start = None  # start index of the first qualifying run

        for i in range(start_idx, end_idx):
            try:
                v = float(gusts[i])
            except Exception:
                v = 0.0

            if v >= threshold:
                if consec == 0:
                    run_start = i
                    run_peak = v
                else:
                    run_peak = max(run_peak, v)
                consec += 1

                # record first qualifying run of required length
                if consec >= persist_hours and run_start is not None:
                    # Lock onto the first qualifying run ("better one alert too many"),
                    # then keep extending peak and length while that same run continues.
                    if best_len == 0:
                        locked_run_start = run_start
                        best_start = ddmmyy_hhmm(dt_list[run_start])
                        best_peak = run_peak
                        best_len = consec
                    elif run_start == locked_run_start:
                        best_peak = max(best_peak, run_peak)
                        best_len = max(best_len, consec)
            else:
                consec = 0
                run_start = None
                run_peak = 0.0

        if best_len >= persist_hours:
            return True, best_peak, best_start, best_len
        return False, 0.0, "", 0

    for lvl, thr in thresholds:
        ok, peak, start_hhmm, run_len = find_run(thr)
        if ok:
            return lvl, peak, start_hhmm, run_len

    return 0, 0.0, "", 0


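# Worked example (illustrative gusts in km/h, not from the original), PERSIST_HOURS = 2:
#   gusts = [70, 78, 80, 65, 50]
#   red (88): no run of 2 consecutive hours -> not persistent
#   orange (75): hours 1-2 reach 78 and 80 -> qualifying run of 2 h
#   -> returns (2, 80.0, "<dd/mm HH:MM of hour 1>", 2) without ever testing yellow.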
def best_rain_persistent_3h(
    rain: List[float],
    times: List[str],
    start_idx: int,
    end_idx: int,
    limit_3h: float,
    persist_hours: int
) -> Tuple[float, str, int]:
    """
    Returns:
        (max_3h_sum, first_start_hhmm_of_persistent_exceedance, persistence_hours)

    Compute rolling 3h sums (hourly precipitation totals).
    Persistence >= 2h means: at least `persist_hours` consecutive rolling windows exceed the limit.
    Each shift by 1 hour -> 'persistence_hours' approximates how long intense conditions persist.
    """
    if end_idx - start_idx < 3:
        return 0.0, "", 0

    dt_list = [parse_time_to_local(t) for t in times]

    # rolling sums for each window start i (covers i, i+1, i+2)
    window_starts = list(range(start_idx, end_idx - 2))
    sums = []
    for i in window_starts:
        try:
            s = float(rain[i]) + float(rain[i + 1]) + float(rain[i + 2])
        except Exception:
            s = 0.0
        sums.append(s)

    # Find persistent exceedance: sums[j] >= limit for >= persist_hours consecutive j
    best_max = 0.0
    best_start = ""
    best_persist = 0

    consec = 0
    run_start_j = None
    run_max = 0.0

    for j, s in enumerate(sums):
        if s >= limit_3h:
            if consec == 0:
                run_start_j = j
                run_max = s
            else:
                run_max = max(run_max, s)
            consec += 1

            if consec >= persist_hours and run_start_j is not None:
                # take the first persistent run ("better one alert too many"), but keep track of the max within it
                if best_persist == 0:
                    start_i = window_starts[run_start_j]
                    best_start = ddmmyy_hhmm(dt_list[start_i])
                    best_persist = consec
                    best_max = run_max
                else:
                    # if the same run continues, update
                    best_persist = max(best_persist, consec)
                    best_max = max(best_max, run_max)
        else:
            consec = 0
            run_start_j = None
            run_max = 0.0

    return best_max, best_start, best_persist


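# Worked example (illustrative hourly rain in mm, not from the original),
# limit_3h = 25.0 and persist_hours = 2:
#   rain = [2, 10, 15, 12, 3, 0] -> rolling 3h sums = [27, 37, 30, 15]
#   the first three windows are >= 25 for 3 consecutive shifts (>= 2 required)
#   -> returns (37.0, "<dd/mm HH:MM of the first window>", 3).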
# =============================================================================
# MESSAGE BUILDERS
# =============================================================================
def wind_message(level: int, peak: float, start_hhmm: str, run_len: int) -> str:
    # run_len is in hours (number of consecutive hourly points)
    if level == 3:
        icon = "🔴"
        title = "TEMPESTA (Burrasca fortissima)"
        thr = WIND_RED
    elif level == 2:
        icon = "🟠"
        title = "VENTO MOLTO FORTE"
        thr = WIND_ORANGE
    else:
        icon = "🟡"
        title = "VENTO FORTE"
        thr = WIND_YELLOW

    return (
        f"{icon} <b>{title}</b>\n"
        f"Persistenza: <b>≥ {PERSIST_HOURS} ore</b> sopra soglia ({thr:.0f} km/h).\n"
        f"🕒 Inizio stimato: <b>{html.escape(start_hhmm or '—')}</b>\n"
        f"📈 Picco in finestra: <b>{peak:.0f}</b> km/h (run ~{run_len}h)."
    )


def rain_message(max_3h: float, start_hhmm: str, persist_h: int, rain_analysis: Optional[Dict] = None) -> str:
    """
    Format the message for persistent intense rain.

    Args:
        max_3h: maximum 3-hour accumulation
        start_hhmm: estimated start time
        persist_h: hours of persistence
        rain_analysis: result of analyze_rainfall_event (optional, for the extended 48h analysis)
    """
    msg_parts = [
        "🌧️ <b>PIOGGIA INTENSA</b>",
        f"Persistenza: <b>≥ {PERSIST_HOURS} ore</b> sopra soglia ({RAIN_3H_LIMIT:.1f} mm/3h).",
        f"🕒 Inizio stimato: <b>{html.escape(start_hhmm or '—')}</b>",
        f"📈 Max su 3 ore in finestra: <b>{max_3h:.1f} mm</b> (persistenza ~{persist_h}h)."
    ]

    # Add information from the extended analysis if available
    if rain_analysis:
        total_mm = rain_analysis.get("total_accumulation_mm", 0.0)
        duration_h = rain_analysis.get("duration_hours", 0)
        max_intensity = rain_analysis.get("max_intensity_mm_h", 0.0)
        end_time = rain_analysis.get("end_time")

        if end_time:
            end_str = end_time.strftime("%d/%m %H:%M")
            msg_parts.append(f"⏱️ <b>Durata totale evento (48h):</b> ~{duration_h} ore (fino alle {end_str})")
        else:
            msg_parts.append(f"⏱️ <b>Durata totale evento (48h):</b> ~{duration_h} ore (in corso)")

        msg_parts.append(f"💧 <b>Accumulo totale previsto:</b> ~{total_mm:.1f} mm")
        msg_parts.append(f"🌧️ <b>Intensità massima oraria:</b> {max_intensity:.1f} mm/h")

    return "\n".join(msg_parts)


# =============================================================================
# MAIN
# =============================================================================
def analyze(chat_ids: Optional[List[str]] = None, debug_mode: bool = False, lat: Optional[float] = None, lon: Optional[float] = None, location_name: Optional[str] = None, timezone: Optional[str] = None) -> None:
    if lat is None:
        lat = DEFAULT_LAT
    if lon is None:
        lon = DEFAULT_LON
    if location_name is None:
        location_name = f"Casa (LAT {lat:.4f}, LON {lon:.4f})"

    LOGGER.info("--- Severe weather check (Wind/Rain/Storms) for %s (timezone: %s) ---", location_name, timezone or TZ)

    data_arome, model_used = get_forecast(lat=lat, lon=lon, timezone=timezone)
    if not data_arome:
        # No Telegram on errors
        return

    hourly_arome = (data_arome.get("hourly", {}) or {})
    times = hourly_arome.get("time", []) or []
    gusts_arome = hourly_arome.get("wind_gusts_10m", []) or []
    rain_arome = hourly_arome.get("precipitation", []) or []
    wcode_arome = hourly_arome.get("weather_code", []) or []

    # Fetch ICON Italia data for comparison and convective analysis
    data_icon = fetch_forecast(MODEL_ICON_IT, lat=lat, lon=lon, timezone=timezone)
    hourly_icon = (data_icon.get("hourly", {}) or {}) if data_icon else {}
    gusts_icon = hourly_icon.get("wind_gusts_10m", []) or []
    rain_icon = hourly_icon.get("precipitation", []) or []
    wcode_icon = hourly_icon.get("weather_code", []) or []

    n = min(len(times), len(gusts_arome), len(rain_arome), len(wcode_arome))
    if n == 0:
        LOGGER.error("Empty hourly series (model=%s).", model_used)
        return

    times = times[:n]
    gusts = gusts_arome[:n]
    rain = rain_arome[:n]
    wcode = wcode_arome[:n]

    now = now_local()
    state = load_state()
    was_alarm = bool(state.get("alert_active", False))

    # Find starting index: first timestep >= now
    start_idx = -1
    for i, t in enumerate(times):
        if parse_time_to_local(t) >= now:
            start_idx = i
            break
    if start_idx == -1:
        LOGGER.error("Could not locate current time index in forecast timeline.")
        return

    end_idx = min(start_idx + HOURS_AHEAD, n)
    if end_idx <= start_idx:
        LOGGER.error("Invalid horizon window (start=%s end=%s).", start_idx, end_idx)
        return

    if DEBUG:
        LOGGER.debug("model=%s start_idx=%s end_idx=%s (hours=%s)",
                     model_used, start_idx, end_idx, end_idx - start_idx)

    # --- Convective storm analysis (severe thunderstorms) ---
    storm_events = []
    if data_icon and data_arome:
        if DEBUG:
            LOGGER.debug("Starting convective analysis (ICON + AROME)")
        storm_events = analyze_convective_risk(data_icon, data_arome, times, start_idx, end_idx)
        if DEBUG:
            LOGGER.debug("Convective analysis complete: %d events detected", len(storm_events))
            if storm_events:
                for evt in storm_events[:3]:  # show the first 3 events
                    LOGGER.debug("  Event: %s - Score: %.1f - Threats: %s",
                                 hhmm(parse_time_to_local(evt["timestamp"])),
                                 evt["score"], evt.get("threats", []))
    elif DEBUG:
        if not data_icon:
            LOGGER.debug("Convective analysis skipped: ICON data not available")
        if not data_arome:
            LOGGER.debug("Convective analysis skipped: AROME data not available")

    # --- Wind persistence ---
    wind_level_curr, wind_peak, wind_start, wind_run_len = best_wind_persistent_level(
        gusts=gusts,
        times=times,
        start_idx=start_idx,
        end_idx=end_idx,
        persist_hours=PERSIST_HOURS
    )

    # --- Rain persistence (3h windows) ---
    rain_max_3h, rain_start, rain_persist = best_rain_persistent_3h(
        rain=rain,
        times=times,
        start_idx=start_idx,
        end_idx=end_idx,
        limit_3h=RAIN_3H_LIMIT,
        persist_hours=PERSIST_HOURS
    )

    # --- Comparisons with ICON Italia ---
    comparisons: Dict[str, Dict] = {}

    # Compare wind (peak)
    if len(gusts_icon) >= n and wind_level_curr > 0:
        max_g_icon = 0.0
        for i in range(start_idx, end_idx):
            if i < len(gusts_icon) and gusts_icon[i] is not None:
                max_g_icon = max(max_g_icon, float(gusts_icon[i]))
        comp_wind = compare_values(wind_peak, max_g_icon) if max_g_icon > 0 else None
        if comp_wind:
            comparisons["wind"] = comp_wind

    # Compare rain (max over 3h)
    if len(rain_icon) >= n and rain_max_3h > 0:
        # Compute the ICON 3h maximum
        max_3h_icon = 0.0
        for i in range(start_idx, min(end_idx - 2, len(rain_icon) - 2)):
            if all(rain_icon[i+j] is not None for j in range(3)):
                sum_3h = sum(float(rain_icon[i+j]) for j in range(3))
                max_3h_icon = max(max_3h_icon, sum_3h)
        comp_rain = compare_values(rain_max_3h, max_3h_icon) if max_3h_icon > 0 else None
        if comp_rain:
            comparisons["rain"] = comp_rain

    # --- Decide notifications ---
    alerts: List[str] = []
    should_notify = False

    # 1) Convective storms (severe thunderstorms) - high priority
    if storm_events:
        prev_storm_active = bool(state.get("convective_storm_active", False))
        max_score = max(e["score"] for e in storm_events)
        prev_score = float(state.get("last_storm_score", 0.0) or 0.0)

        # Notify if: new event, or the score increases significantly (+15 points)
        if debug_mode or not prev_storm_active or (max_score >= prev_score + 15.0):
            if debug_mode:
                LOGGER.info("[DEBUG MODE] Anti-spam bypass: forced send for severe thunderstorms")
            convective_msg = format_convective_alert(storm_events, times, start_idx)
            if convective_msg:
                alerts.append(convective_msg)
                should_notify = True
            state["convective_storm_active"] = True
            state["last_storm_score"] = float(max_score)
    else:
        state["convective_storm_active"] = False
        state["last_storm_score"] = 0.0

    # 2) Wind (persistent)
    if wind_level_curr > 0:
        prev_level = int(state.get("wind_level", 0) or 0)
        if debug_mode or (not was_alarm) or (wind_level_curr > prev_level):
            if debug_mode:
                LOGGER.info("[DEBUG MODE] Anti-spam bypass: forced send for wind")
            wind_msg = wind_message(wind_level_curr, wind_peak, wind_start, wind_run_len)
            if "wind" in comparisons:
                comp = comparisons["wind"]
                wind_msg += f"\n⚠️ <b>Discordanza modelli</b>: AROME {comp['arome']:.0f} km/h | ICON {comp['icon']:.0f} km/h (scostamento {comp['diff_pct']:.0f}%)"
            alerts.append(wind_msg)
            should_notify = True
            state["wind_level"] = wind_level_curr
            state["last_wind_peak"] = float(wind_peak)
    else:
        state["wind_level"] = 0
        state["last_wind_peak"] = 0.0

    # 3) Rain (persistent)
    if rain_persist >= PERSIST_HOURS and rain_max_3h >= RAIN_3H_LIMIT:
        prev_rain = float(state.get("last_rain_3h", 0.0) or 0.0)
        # "Better one alert too many": notify on the first persistent exceedance as well,
        # and re-notify if the 3h maximum worsens by >= +10 mm
        if debug_mode or (not was_alarm) or (rain_max_3h >= prev_rain + 10.0):
            if debug_mode:
                LOGGER.info("[DEBUG MODE] Anti-spam bypass: forced send for rain")

            # Extended 48-hour analysis for intense rain
            rain_analysis = None
            if rain_start:
                # Find the event start index by looking for the matching timestamp
                rain_start_idx = -1
                for i, t in enumerate(times):
                    try:
                        t_dt = parse_time_to_local(t)
                        if ddmmyy_hhmm(t_dt) == rain_start:
                            rain_start_idx = i
                            break
                    except Exception:
                        continue

                if rain_start_idx >= 0 and rain_start_idx < len(times):
                    # Minimum threshold (8 mm/h) for an hour to count as significant rain in the extended analysis
                    rain_analysis = analyze_rainfall_event(
                        times=times,
                        precipitation=rain,
                        weathercode=wcode,
                        start_idx=rain_start_idx,
                        max_hours=48,
                        threshold_mm_h=8.0  # threshold for intense rain
                    )

            rain_msg = rain_message(rain_max_3h, rain_start, rain_persist, rain_analysis=rain_analysis)
            if "rain" in comparisons:
                comp = comparisons["rain"]
                rain_msg += f"\n⚠️ <b>Discordanza modelli</b>: AROME {comp['arome']:.1f} mm | ICON {comp['icon']:.1f} mm (scostamento {comp['diff_pct']:.0f}%)"
            alerts.append(rain_msg)
            should_notify = True
            state["last_rain_3h"] = float(rain_max_3h)
    else:
        state["last_rain_3h"] = 0.0

    is_alarm_now = (
        (storm_events is not None and len(storm_events) > 0)
        or (wind_level_curr > 0)
        or (rain_persist >= PERSIST_HOURS and rain_max_3h >= RAIN_3H_LIMIT)
    )

    # In debug mode, force sending even when there are no alerts
    debug_message_only = False
    if debug_mode and not alerts:
        LOGGER.info("[DEBUG MODE] No alerts, creating an informational message")
        alerts.append("ℹ️ <i>Nessuna condizione meteo severa rilevata nelle prossime %s ore.</i>" % HOURS_AHEAD)
        should_notify = True
        debug_message_only = True  # flags that this is only a debug message, not a real alert

    # --- Send only on alerts (never on errors) ---
    if should_notify and alerts:
        headline = "⚠️ <b>AVVISO METEO SEVERO</b>"
        model_info = model_used
        if comparisons:
            model_info = f"{model_used} + ICON Italia (discordanza rilevata)"

        # If severe thunderstorms are present, add information about the models used
        if storm_events:
            model_info = f"{model_used} + ICON Italia (analisi convettiva combinata)"

        meta = (
            f"📍 <code>{html.escape(location_name)}</code>\n"
            f"🕒 <code>Finestra: prossime {HOURS_AHEAD} ore</code>\n"
            f"🛰️ <code>Modello: {html.escape(model_info)}</code>\n"
            f"⏱️ <code>Persistenza minima: {PERSIST_HOURS} ore</code>\n"
        )
        body = "\n\n".join(alerts)
        footer = "\n\n<i>Fonte dati: Open-Meteo | Analisi nowcasting per temporali severi</i>"
        msg = f"{headline}\n{meta}\n{body}{footer}"

        ok = telegram_send_html(msg, chat_ids=chat_ids)
        if ok:
            LOGGER.info("Alert sent successfully.")
        else:
            LOGGER.warning("Alert NOT sent (token missing or Telegram error).")

        # IMPORTANT: set alert_active = True only when there is a real alert,
        # not when this is just an informational message in debug mode
        if not debug_message_only:
            # Determine the alert type based on the current conditions
            alert_types = []
            if storm_events and len(storm_events) > 0:
                alert_types.append("TEMPORALI SEVERI")
            if wind_level_curr > 0:
                wind_labels = {3: "TEMPESTA", 2: "VENTO MOLTO FORTE", 1: "VENTO FORTE"}
                alert_types.append(wind_labels.get(wind_level_curr, "VENTO FORTE"))
            if rain_persist >= PERSIST_HOURS and rain_max_3h >= RAIN_3H_LIMIT:
                alert_types.append("PIOGGIA INTENSA")

            state["alert_active"] = True
            state["last_alert_type"] = alert_types if alert_types else None
            state["last_alert_time"] = now.isoformat()
            save_state(state)
        else:
            # In debug mode with no real alerts, do not modify alert_active
            LOGGER.debug("[DEBUG MODE] Message sent but alert_active not modified (no real alert)")
        return

    # Optional: cleared message (transition only)
    if SEND_ALL_CLEAR and was_alarm and (not is_alarm_now):
        # Retrieve information about the alert that has cleared
        last_alert_type = state.get("last_alert_type")
        last_alert_time_str = state.get("last_alert_time")

        # Format the alert type
        alert_type_text = ""
        if last_alert_type:
            if isinstance(last_alert_type, list):
                alert_type_text = " + ".join(last_alert_type)
            else:
                alert_type_text = str(last_alert_type)

        # Format the notification time
        alert_time_text = ""
        if last_alert_time_str:
            try:
                alert_time_dt = parse_time_to_local(last_alert_time_str)
                alert_time_text = ddmmyy_hhmm(alert_time_dt)
            except Exception:
                try:
                    # Fallback: try to parse as ISO
                    alert_time_dt = parser.isoparse(last_alert_time_str)
                    if alert_time_dt.tzinfo is None:
                        alert_time_dt = alert_time_dt.replace(tzinfo=TZINFO)
                    else:
                        alert_time_dt = alert_time_dt.astimezone(TZINFO)
                    alert_time_text = ddmmyy_hhmm(alert_time_dt)
                except Exception:
                    alert_time_text = last_alert_time_str

        # Build the message
        msg_parts = [
            "🟢 <b>ALLERTA METEO RIENTRATA</b>",
            "Condizioni rientrate sotto le soglie di guardia."
        ]

        if alert_type_text:
            msg_parts.append(f"📋 <b>Tipo allerta rientrata:</b> {html.escape(alert_type_text)}")

        if alert_time_text:
            msg_parts.append(f"🕐 <b>Notificata alle:</b> {html.escape(alert_time_text)}")

        msg_parts.extend([
            f"🕒 <code>Finestra: prossime {HOURS_AHEAD} ore</code>",
            f"🛰️ <code>Modello: {html.escape(model_used)}</code>",
            f"⏱️ <code>Persistenza minima: {PERSIST_HOURS} ore</code>"
        ])

        msg = "\n".join(msg_parts)
        ok = telegram_send_html(msg, chat_ids=chat_ids)
        if ok:
            LOGGER.info("All-clear sent successfully.")
        else:
            LOGGER.warning("All-clear NOT sent (token missing or Telegram error).")

        state = {
            "alert_active": False,
            "wind_level": 0,
            "last_wind_peak": 0.0,
            "last_rain_3h": 0.0,
            "convective_storm_active": False,
            "last_storm_score": 0.0,
            "last_alert_type": None,
            "last_alert_time": None,
        }
        save_state(state)
        return

    # No new alert
    state["alert_active"] = bool(is_alarm_now)
    save_state(state)
    storm_count = len(storm_events) if storm_events else 0
    LOGGER.info(
        "No new alert. model=%s wind_level=%s rain3h=%.1fmm(persist=%sh) storms=%d",
        model_used, wind_level_curr, rain_max_3h, rain_persist, storm_count
    )
    if DEBUG and storm_events:
        max_score = max(e["score"] for e in storm_events)
        LOGGER.debug("Storm events present (max_score=%.1f) but below notification threshold", max_score)


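# Illustrative structure of viaggi_attivi.json used by --check_viaggi below
# (example values only, not from the original): one entry per chat id,
#   {
#     "123456789": {"name": "Trip to Rome", "lat": 41.9, "lon": 12.5, "timezone": "Europe/Rome"}
#   }
# Each active trip gets its own analyze() run with its coordinates and timezone.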
if __name__ == "__main__":
    arg_parser = argparse.ArgumentParser(description="Severe weather alert")
    arg_parser.add_argument("--debug", action="store_true", help="Send messages only to the admin (chat ID: %s)" % TELEGRAM_CHAT_IDS[0])
    arg_parser.add_argument("--lat", type=float, help="Latitude (default: Casa)")
    arg_parser.add_argument("--lon", type=float, help="Longitude (default: Casa)")
    arg_parser.add_argument("--location", help="Location name (default: Casa)")
    arg_parser.add_argument("--timezone", help="IANA timezone (e.g. Europe/Rome, America/New_York)")
    arg_parser.add_argument("--chat_id", help="Telegram chat ID for direct sending (optional, multiple IDs separated by commas)")
    arg_parser.add_argument("--check_viaggi", action="store_true", help="Check active trips and send alerts for all their locations")
    args = arg_parser.parse_args()

    # With --check_viaggi, check active trips and send for all their locations
    if args.check_viaggi:
        VIAGGI_STATE_FILE = os.path.join(BASE_DIR, "viaggi_attivi.json")
        if os.path.exists(VIAGGI_STATE_FILE):
            try:
                with open(VIAGGI_STATE_FILE, "r", encoding="utf-8") as f:
                    viaggi = json.load(f) or {}
                for chat_id, viaggio in viaggi.items():
                    LOGGER.info("Processing active trip for chat_id=%s: %s", chat_id, viaggio.get("name"))
                    analyze(
                        chat_ids=[chat_id],
                        debug_mode=False,
                        lat=viaggio.get("lat"),
                        lon=viaggio.get("lon"),
                        location_name=viaggio.get("name"),
                        timezone=viaggio.get("timezone")
                    )
                    time.sleep(1)  # pause between sends
            except Exception as e:
                LOGGER.exception("Error reading active trips: %s", e)
        # Also send for Casa (normal behaviour)
        analyze(chat_ids=None, debug_mode=args.debug)
    else:
        # Normal behaviour: determine chat_ids
        chat_ids = None
        if args.chat_id:
            chat_ids = [cid.strip() for cid in args.chat_id.split(",") if cid.strip()]
        elif args.debug:
            chat_ids = [TELEGRAM_CHAT_IDS[0]]

        analyze(chat_ids=chat_ids, debug_mode=args.debug, lat=args.lat, lon=args.lon, location_name=args.location, timezone=args.timezone)