Files
loogle-scripts/services/telegram-bot/severe_weather_circondario.py

636 lines
23 KiB
Python
Executable File
Raw Blame History

This file contains invisible Unicode characters
This file contains invisible Unicode characters that are indistinguishable to humans but may be processed differently by a computer. If you think that this is intentional, you can safely ignore this warning. Use the Escape button to reveal them.
This file contains Unicode characters that might be confused with other characters. If you think that this is intentional, you can safely ignore this warning. Use the Escape button to reveal them.
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import argparse
import datetime
import html
import json
import logging
import os
import time
from logging.handlers import RotatingFileHandler
from typing import Dict, List, Optional, Tuple
from zoneinfo import ZoneInfo
import requests
from dateutil import parser
# =============================================================================
# SEVERE WEATHER ALERT CIRCONDARIO (next 48h) - Analisi Temporali Severi
# - Analizza rischio temporali severi per 9 località del circondario
# - Fulminazioni elevate (CAPE > 800 J/kg + LPI > 0)
# - Downburst (CAPE > 1500 J/kg + Wind Gusts > 60 km/h)
# - Nubifragi (Precipitation > 20mm/h o somma 3h > 40mm)
# - Rischio Alluvioni (precipitazioni intense e prolungate)
#
# Telegram token: NOT in clear.
# Read order:
# 1) env TELEGRAM_BOT_TOKEN
# 2) ~/.telegram_dpc_bot_token
# 3) /etc/telegram_dpc_bot_token
#
# Debug:
# DEBUG=1 python3 severe_weather_circondario.py
#
# Log:
# ./weather_alert_circondario.log (same folder as this script)
# =============================================================================
# DEBUG=1 in the environment enables verbose logging plus console echo.
DEBUG = os.environ.get("DEBUG", "0").strip() == "1"
# ----------------- TELEGRAM -----------------
# Recipient chat ids; the first entry doubles as the admin chat for --debug.
TELEGRAM_CHAT_IDS = ["64463169", "24827341", "132455422", "5405962012"]
# Token file locations, checked after the TELEGRAM_BOT_TOKEN env var.
TOKEN_FILE_HOME = os.path.expanduser("~/.telegram_dpc_bot_token")
TOKEN_FILE_ETC = "/etc/telegram_dpc_bot_token"
# ----------------- CIRCONDARIO LOCATIONS -----------------
# Coordinates of the locations to monitor
LOCALITA_CIRCONDARIO = [
    {"name": "Bologna", "lat": 44.4938, "lon": 11.3387},
    {"name": "Imola", "lat": 44.3552, "lon": 11.7164},
    {"name": "Faenza", "lat": 44.2856, "lon": 11.8798},
    {"name": "Ravenna", "lat": 44.4175, "lon": 12.1996},
    {"name": "Forlì", "lat": 44.2231, "lon": 12.0401},
    {"name": "Cesena", "lat": 44.1390, "lon": 12.2435},
    {"name": "Rimini", "lat": 44.0678, "lon": 12.5695},
    {"name": "Riccione", "lat": 44.0015, "lon": 12.6484},
    {"name": "Pesaro", "lat": 43.9100, "lon": 12.9133},
]
# ----------------- THRESHOLDS -----------------
HOURS_AHEAD = 24  # Analysis window: the next 24 hours
# ----------------- CONVECTIVE STORM THRESHOLDS -----------------
CAPE_LIGHTNING_THRESHOLD = 800.0  # J/kg - threshold for lightning risk
CAPE_SEVERE_THRESHOLD = 1500.0  # J/kg - threshold for violent thunderstorms
WIND_GUST_DOWNBURST_THRESHOLD = 60.0  # km/h - wind threshold for downburst
RAIN_INTENSE_THRESHOLD_H = 20.0  # mm/h - hourly cloudburst threshold
RAIN_INTENSE_THRESHOLD_3H = 40.0  # mm/3h - 3-hour cloudburst threshold
RAIN_FLOOD_THRESHOLD_24H = 100.0  # mm/24h - flood-risk threshold
STORM_SCORE_THRESHOLD = 40.0  # Minimum Storm Severity Score to trigger an alert
# ----------------- FILES -----------------
BASE_DIR = os.path.dirname(os.path.abspath(__file__))
STATE_FILE = os.path.join(BASE_DIR, "weather_state_circondario.json")
LOG_FILE = os.path.join(BASE_DIR, "weather_alert_circondario.log")
# ----------------- OPEN-METEO -----------------
OPEN_METEO_URL = "https://api.open-meteo.com/v1/forecast"
GEOCODING_URL = "https://geocoding-api.open-meteo.com/v1/search"  # appears unused in this script — TODO confirm
TZ = "Europe/Rome"  # Italy timezone for the circondario
TZINFO = ZoneInfo(TZ)
HTTP_HEADERS = {"User-Agent": "rpi-severe-weather-circondario/1.0"}
# Weather models queried from Open-Meteo
MODEL_PRIMARY = "meteofrance_seamless"
MODEL_FALLBACK = "meteofrance_arome_france_hd"
MODEL_ICON_IT = "italia_meteo_arpae_icon_2i"
# =============================================================================
# LOGGING
# =============================================================================
def setup_logger() -> logging.Logger:
    """Build the module logger: rotating file handler always, console echo in DEBUG."""
    log = logging.getLogger("severe_weather_circondario")
    log.setLevel(logging.DEBUG if DEBUG else logging.INFO)
    # Drop any handlers left over from a previous configuration of this logger.
    log.handlers.clear()
    formatter = logging.Formatter("%(asctime)s %(levelname)s %(message)s")
    file_handler = RotatingFileHandler(
        LOG_FILE, maxBytes=1_000_000, backupCount=5, encoding="utf-8"
    )
    file_handler.setLevel(logging.DEBUG)
    file_handler.setFormatter(formatter)
    log.addHandler(file_handler)
    if DEBUG:
        console = logging.StreamHandler()
        console.setLevel(logging.DEBUG)
        console.setFormatter(formatter)
        log.addHandler(console)
    return log
# Module-wide logger, configured once at import time.
LOGGER = setup_logger()
# =============================================================================
# UTILS
# =============================================================================
def ensure_parent_dir(path: str) -> None:
    """Create the parent directory of *path* if it does not already exist.

    Calls makedirs(exist_ok=True) directly: the previous os.path.exists()
    pre-check was redundant and racy (TOCTOU) — exist_ok already makes the
    call a no-op when the directory is present.
    """
    parent = os.path.dirname(path)
    if parent:  # a bare filename has no parent to create
        os.makedirs(parent, exist_ok=True)
def now_local() -> datetime.datetime:
    """Return the current wall-clock time as an aware datetime in Europe/Rome."""
    return datetime.datetime.now(tz=TZINFO)
def read_text_file(path: str) -> str:
    """Return the stripped UTF-8 contents of *path*, or "" when unreadable.

    Missing files are silent; permission errors log at debug level; any other
    failure logs a traceback. Never raises.
    """
    try:
        with open(path, "r", encoding="utf-8") as handle:
            content = handle.read()
    except FileNotFoundError:
        return ""
    except PermissionError:
        LOGGER.debug("Permission denied reading %s", path)
        return ""
    except Exception as exc:
        LOGGER.exception("Error reading %s: %s", path, exc)
        return ""
    return content.strip()
def load_bot_token() -> str:
    """Resolve the Telegram bot token.

    Order: TELEGRAM_BOT_TOKEN env var, then ~/.telegram_dpc_bot_token,
    then /etc/telegram_dpc_bot_token. Returns "" when none yields a value.
    """
    env_token = os.environ.get("TELEGRAM_BOT_TOKEN", "").strip()
    if env_token:
        return env_token
    for token_path in (TOKEN_FILE_HOME, TOKEN_FILE_ETC):
        file_token = read_text_file(token_path)
        if file_token:
            return file_token.strip()
    return ""
def parse_time_to_local(t: str) -> datetime.datetime:
    """Parse an ISO-8601 string into an aware datetime in Europe/Rome.

    Naive timestamps are stamped as local time (Open-Meteo returns local
    times when a timezone is requested); aware ones are converted.
    """
    parsed = parser.isoparse(t)
    if parsed.tzinfo is not None:
        return parsed.astimezone(TZINFO)
    return parsed.replace(tzinfo=TZINFO)
def hhmm(dt: datetime.datetime) -> str:
    """Format *dt* as HH:MM (24-hour clock)."""
    return format(dt, "%H:%M")
def ddmmyyhhmm(dt: datetime.datetime) -> str:
    """Format *dt* as 'dd/mm HH:MM' (no year, despite the name)."""
    return format(dt, "%d/%m %H:%M")
# =============================================================================
# TELEGRAM
# =============================================================================
def telegram_send_html(message_html: str, chat_ids: Optional[List[str]] = None) -> bool:
    """Send an HTML-formatted Telegram message to each chat id; never raises.

    Falls back to the module-wide TELEGRAM_CHAT_IDS when *chat_ids* is None.
    Returns True when at least one delivery received HTTP 200.
    """
    token = load_bot_token()
    if not token:
        LOGGER.warning("Telegram token missing: message not sent.")
        return False
    targets = TELEGRAM_CHAT_IDS if chat_ids is None else chat_ids
    endpoint = f"https://api.telegram.org/bot{token}/sendMessage"
    delivered = False
    with requests.Session() as session:
        for chat_id in targets:
            payload = {
                "text": message_html,
                "parse_mode": "HTML",
                "disable_web_page_preview": True,
                "chat_id": chat_id,
            }
            try:
                resp = session.post(endpoint, json=payload, timeout=15)
                if resp.status_code == 200:
                    delivered = True
                else:
                    LOGGER.error("Telegram error chat_id=%s status=%s body=%s",
                                 chat_id, resp.status_code, resp.text[:500])
                # Gentle pacing between sends to stay under rate limits.
                time.sleep(0.25)
            except Exception as exc:
                LOGGER.exception("Telegram exception chat_id=%s err=%s", chat_id, exc)
    return delivered
# =============================================================================
# STATE
# =============================================================================
def load_state() -> Dict:
    """Read persisted alert state from STATE_FILE, merged over safe defaults.

    Any read/parse failure is logged and the defaults are returned.
    """
    state = {
        "alert_active": False,
        "locations": {},  # {location_name: {"last_score": 0.0, "last_storm_time": None}}
    }
    if os.path.exists(STATE_FILE):
        try:
            with open(STATE_FILE, "r", encoding="utf-8") as handle:
                stored = json.load(handle) or {}
            state.update(stored)
        except Exception as exc:
            LOGGER.exception("State read error: %s", exc)
    return state
def save_state(state: Dict) -> None:
    """Persist *state* to STATE_FILE as pretty UTF-8 JSON; log (never raise) on failure."""
    try:
        ensure_parent_dir(STATE_FILE)
        with open(STATE_FILE, "w", encoding="utf-8") as handle:
            json.dump(state, handle, ensure_ascii=False, indent=2)
    except Exception as exc:
        LOGGER.exception("State write error: %s", exc)
# =============================================================================
# OPEN-METEO
# =============================================================================
def fetch_forecast(models_value: str, lat: float, lon: float) -> Optional[Dict]:
    """Query Open-Meteo's hourly forecast for one model; return parsed JSON or None.

    HTTP 400 (bad model/params) and transport errors are logged and mapped to
    None so callers can fall back to another model.
    """
    hourly_vars = "precipitation,wind_gusts_10m,weather_code,cape"
    # Model-specific extras. Note: "cape" is already in the base list, so the
    # previous code's extra ",cape" for the ICON model duplicated the variable
    # in the request; ICON needs no addition here.
    if models_value in (MODEL_PRIMARY, MODEL_FALLBACK):
        hourly_vars += ",convective_inhibition"
    params = {
        "latitude": lat,
        "longitude": lon,
        "hourly": hourly_vars,
        "timezone": TZ,
        "forecast_days": 2,
        "wind_speed_unit": "kmh",
        "precipitation_unit": "mm",
        "models": models_value,
    }
    try:
        r = requests.get(OPEN_METEO_URL, params=params, headers=HTTP_HEADERS, timeout=25)
        if r.status_code == 400:
            # Open-Meteo reports parameter problems via a "reason" field.
            try:
                j = r.json()
                LOGGER.error("Open-Meteo 400 (models=%s, lat=%.4f, lon=%.4f): %s",
                             models_value, lat, lon, j.get("reason", j))
            except Exception:
                LOGGER.error("Open-Meteo 400 (models=%s): %s", models_value, r.text[:500])
            return None
        r.raise_for_status()
        return r.json()
    except Exception as e:
        LOGGER.exception("Open-Meteo request error (models=%s, lat=%.4f, lon=%.4f): %s",
                         models_value, lat, lon, e)
        return None
def get_forecast(lat: float, lon: float) -> Tuple[Optional[Dict], Optional[Dict], str]:
    """Fetch forecasts for one point.

    Returns (arome_data, icon_data, model_used): the AROME seamless model with
    an HD fallback for convective fields, plus ICON Italia for lightning data.
    Either payload may be None on failure.
    """
    LOGGER.debug("Requesting Open-Meteo for lat=%.4f lon=%.4f", lat, lon)
    model_used = MODEL_PRIMARY
    data_arome = fetch_forecast(MODEL_PRIMARY, lat, lon)
    if data_arome is None:
        LOGGER.warning("Primary model failed (%s). Trying fallback=%s", MODEL_PRIMARY, MODEL_FALLBACK)
        model_used = MODEL_FALLBACK
        data_arome = fetch_forecast(MODEL_FALLBACK, lat, lon)
    # ICON Italia is queried separately for the lightning potential index.
    data_icon = fetch_forecast(MODEL_ICON_IT, lat, lon)
    return data_arome, data_icon, model_used
# =============================================================================
# CONVECTIVE STORM ANALYSIS (from severe_weather.py)
# =============================================================================
def _rolling_sum(values: List, window: int) -> List[float]:
    """Trailing-window sums: out[i] = sum(values[i-window+1 : i+1]); 0.0 until the window fills."""
    sums: List[float] = []
    for i in range(len(values)):
        if i < window - 1:
            sums.append(0.0)
            continue
        try:
            sums.append(sum(float(values[j])
                            for j in range(i - window + 1, i + 1)
                            if values[j] is not None))
        except Exception:
            sums.append(0.0)
    return sums


def analyze_convective_risk(icon_data: Dict, arome_data: Dict, times_base: List[str],
                            start_idx: int, end_idx: int) -> List[Dict]:
    """Score severe-thunderstorm potential hour by hour.

    Combines AROME fields (CAPE, gusts, precipitation) with ICON Italia
    lightning data over times_base[start_idx:end_idx]. Returns one dict per
    hour (timestamp, score, threats, raw values) for hours whose Storm
    Severity Score reaches STORM_SCORE_THRESHOLD or that trigger at least
    one named threat. Empty list when either payload is missing.
    """
    if not icon_data or not arome_data:
        return []
    icon_hourly = icon_data.get("hourly", {}) or {}
    arome_hourly = arome_data.get("hourly", {}) or {}
    icon_times = icon_hourly.get("time", []) or []
    # ICON lightning series: accept any of the known variable names.
    icon_lpi = (icon_hourly.get("lightning_potential_index", []) or
                icon_hourly.get("lightning_potential", []) or
                icon_hourly.get("lpi", []) or [])
    icon_cape = icon_hourly.get("cape", []) or []
    if not icon_lpi and icon_cape:
        # No LPI published: derive a binary proxy from ICON CAPE. Uses the
        # shared constant instead of the previous hard-coded 800 (same value).
        icon_lpi = [1.0 if (cape is not None and float(cape) > CAPE_LIGHTNING_THRESHOLD) else 0.0
                    for cape in icon_cape]
    arome_cape = arome_hourly.get("cape", []) or []
    arome_gusts = arome_hourly.get("wind_gusts_10m", []) or []
    arome_precip = arome_hourly.get("precipitation", []) or []
    # Trailing accumulations for the cloudburst (3 h) and flood (24 h) checks.
    arome_precip_3h = _rolling_sum(arome_precip, 3)
    arome_precip_24h = _rolling_sum(arome_precip, 24)
    results = []
    # Loop bound already guarantees i is valid for every AROME series (and
    # the rolling-sum lists share arome_precip's length).
    last = min(end_idx, len(times_base), len(arome_cape), len(arome_gusts), len(arome_precip))
    for i in range(start_idx, last):
        try:
            cape_val = float(arome_cape[i]) if arome_cape[i] is not None else 0.0
            gusts_val = float(arome_gusts[i]) if arome_gusts[i] is not None else 0.0
            precip_val = float(arome_precip[i]) if arome_precip[i] is not None else 0.0
            precip_3h_val = arome_precip_3h[i]
            precip_24h_val = arome_precip_24h[i]
        except (ValueError, TypeError, IndexError):
            continue
        lpi_val = 0.0
        if i < len(icon_times) and i < len(icon_lpi):
            try:
                icon_time = parse_time_to_local(icon_times[i])
                arome_time = parse_time_to_local(times_base[i])
                # Only trust ICON's value when the two model grids line up in time.
                if abs((icon_time - arome_time).total_seconds() / 60) < 30:
                    lpi_val = float(icon_lpi[i]) if icon_lpi[i] is not None else 0.0
            except (ValueError, TypeError, IndexError):
                pass
        # --- Storm Severity Score (capped component contributions) ---
        score = 0.0
        threats = []
        if cape_val > 0:
            score += min(40.0, (cape_val / 2000.0) * 40.0)  # instability, max 40 pts
        if lpi_val > 0:
            # Exactly 1.0 is the CAPE-derived proxy above -> flat 20 pts;
            # otherwise scale the real LPI up to 30 pts.
            score += 20.0 if lpi_val == 1.0 else min(30.0, lpi_val * 10.0)
        if gusts_val > WIND_GUST_DOWNBURST_THRESHOLD and precip_val > 0.1:
            score += min(30.0, ((gusts_val - WIND_GUST_DOWNBURST_THRESHOLD) / 40.0) * 30.0)
        # Named threats
        if cape_val > CAPE_LIGHTNING_THRESHOLD and lpi_val > 0:
            threats.append("Fulminazioni")
        if cape_val > CAPE_SEVERE_THRESHOLD and gusts_val > WIND_GUST_DOWNBURST_THRESHOLD:
            threats.append("Downburst/Temporale violento")
        if precip_val > RAIN_INTENSE_THRESHOLD_H or precip_3h_val > RAIN_INTENSE_THRESHOLD_3H:
            threats.append("Nubifragio")
        # Flood risk: intense, prolonged rain (24 h accumulation over threshold)
        if precip_24h_val > RAIN_FLOOD_THRESHOLD_24H:
            threats.append("Rischio Alluvioni")
            # Up to 10 bonus points for how far past the flood threshold we are.
            score += min(10.0, (precip_24h_val - RAIN_FLOOD_THRESHOLD_24H) / 10.0)
        if score >= STORM_SCORE_THRESHOLD or threats:
            results.append({
                "timestamp": times_base[i],
                "score": score,
                "threats": threats,
                "cape": cape_val,
                "lpi": lpi_val,
                "gusts": gusts_val,
                "precip": precip_val,
                "precip_3h": precip_3h_val,
                "precip_24h": precip_24h_val,
            })
    return results
# =============================================================================
# MESSAGE FORMATTING
# =============================================================================
def format_location_alert(location_name: str, storm_events: List[Dict]) -> str:
    """Render the HTML alert fragment for one location; "" when no events."""
    if not storm_events:
        return ""
    max_score = max(event["score"] for event in storm_events)
    first_time = parse_time_to_local(storm_events[0]["timestamp"])
    last_time = parse_time_to_local(storm_events[-1]["timestamp"])
    duration_hours = len(storm_events)
    # Union of every threat seen across the risky window.
    all_threats = set()
    for event in storm_events:
        all_threats.update(event.get("threats", []))
    threats_str = ", ".join(all_threats) if all_threats else "Temporali severi"
    max_cape = max(event["cape"] for event in storm_events)
    max_precip_24h = max((event.get("precip_24h", 0) for event in storm_events), default=0)
    msg = "\n".join([
        f"📍 <b>{html.escape(location_name)}</b>",
        f"📊 Score: <b>{max_score:.0f}/100</b> | {threats_str}",
        f"🕒 {ddmmyyhhmm(first_time)} - {ddmmyyhhmm(last_time)} (~{duration_hours}h)",
        f"⚡ CAPE max: {max_cape:.0f} J/kg",
    ])
    if max_precip_24h > RAIN_FLOOD_THRESHOLD_24H:
        msg += f" | 💧 Accumulo 24h: <b>{max_precip_24h:.1f} mm</b> ⚠️"
    return msg
def format_circondario_alert(locations_data: Dict[str, List[Dict]]) -> str:
    """Build the aggregated circondario-wide HTML alert; "" when no data."""
    if not locations_data:
        return ""
    headline = "⛈️ <b>ALLERTA TEMPORALI SEVERI - CIRCONDARIO</b>"
    total_locations = len(locations_data)
    max_score_overall = max(
        max((event["score"] for event in events), default=0)
        for events in locations_data.values()
    )
    # Overall window: earliest and latest risky timestamp across all locations.
    all_times = [parse_time_to_local(event["timestamp"])
                 for events in locations_data.values()
                 for event in events]
    if all_times:
        period_str = f"{ddmmyyhhmm(min(all_times))} - {ddmmyyhhmm(max(all_times))}"
    else:
        period_str = "N/A"
    meta = (
        f"📍 <b>{total_locations} località</b> con rischio temporali severi\n"
        f"📊 <b>Storm Severity Score max:</b> <b>{max_score_overall:.0f}/100</b>\n"
        f"🕒 <b>Periodo:</b> {period_str}\n"
        f"🛰️ <b>Modelli:</b> AROME Seamless + ICON Italia\n"
    )
    # One fragment per location, alphabetical, skipping empty renders.
    fragments = (format_location_alert(loc_name, events)
                 for loc_name, events in sorted(locations_data.items()))
    body = "\n\n".join(fragment for fragment in fragments if fragment)
    footer = "\n\n<i>Fonte dati: Open-Meteo | Analisi nowcasting temporali severi</i>"
    return f"{headline}\n{meta}\n{body}{footer}"
# =============================================================================
# MAIN ANALYSIS
# =============================================================================
def analyze_location(location: Dict) -> Optional[List[Dict]]:
    """Run the severe-storm analysis for one location dict ({name, lat, lon}).

    Returns the list of risky hours (possibly empty), or None when forecast
    data is unavailable or no usable time window exists.
    """
    name, lat, lon = location["name"], location["lat"], location["lon"]
    LOGGER.debug("Analizzando %s (%.4f, %.4f)", name, lat, lon)
    data_arome, data_icon, model_used = get_forecast(lat, lon)
    if not data_arome:
        LOGGER.warning("Nessun dato AROME per %s", name)
        return None
    times = (data_arome.get("hourly", {}) or {}).get("time", []) or []
    if not times:
        LOGGER.warning("Nessun timestamp per %s", name)
        return None
    # First forecast hour at or after "now" marks the analysis window start.
    now = now_local()
    start_idx = next((i for i, t in enumerate(times) if parse_time_to_local(t) >= now), -1)
    if start_idx == -1:
        LOGGER.warning("Nessun indice di partenza valido per %s", name)
        return None
    end_idx = min(start_idx + HOURS_AHEAD, len(times))
    if not data_icon:
        # Without ICON lightning data the convective analysis is skipped.
        LOGGER.warning("Nessun dato ICON per %s, analisi convettiva limitata", name)
        return None
    storm_events = analyze_convective_risk(data_icon, data_arome, times, start_idx, end_idx)
    if DEBUG and storm_events:
        LOGGER.debug(" %s: %d eventi rilevati", name, len(storm_events))
    return storm_events
def analyze_all_locations(debug_mode: bool = False) -> None:
    """Analyze every circondario location and drive Telegram alerting.

    Flow: load persisted state, analyze each location, send one aggregated
    alert message when any location shows risk, send an all-clear when a
    previously active alert has lapsed, and persist the updated state.
    """
    LOGGER.info("=== Analisi Temporali Severi - Circondario ===")
    state = load_state()
    was_alert_active = bool(state.get("alert_active", False))
    locations_with_risk = {}
    for location in LOCALITA_CIRCONDARIO:
        name = location["name"]
        storm_events = analyze_location(location)
        if storm_events:
            locations_with_risk[name] = storm_events
            max_score = max(e["score"] for e in storm_events)
            # Check whether this is a new event or a significant worsening
            # (score climbed by at least 15 points since the last run).
            loc_state = state.get("locations", {}).get(name, {})
            prev_score = float(loc_state.get("last_score", 0.0) or 0.0)
            if debug_mode or not loc_state.get("alert_sent", False) or (max_score >= prev_score + 15.0):
                # Update per-location state.
                # NOTE(review): this gate only updates the stored state; the
                # aggregated message below is sent whenever ANY location has
                # risk, so alerts may repeat on every run — confirm intended.
                if "locations" not in state:
                    state["locations"] = {}
                state["locations"][name] = {
                    "last_score": float(max_score),
                    "alert_sent": True,
                    "last_storm_time": storm_events[0]["timestamp"]
                }
        time.sleep(0.5)  # Rate limiting between Open-Meteo API calls
    # Send the alert when at least one location is at risk
    if locations_with_risk or debug_mode:
        if locations_with_risk:
            msg = format_circondario_alert(locations_with_risk)
            if msg:
                ok = telegram_send_html(msg)
                if ok:
                    LOGGER.info("Alert inviato per %d località", len(locations_with_risk))
                else:
                    LOGGER.warning("Alert NON inviato (token missing o errore Telegram)")
            state["alert_active"] = True
            save_state(state)
        elif debug_mode:
            # In debug mode, send a message even when no risk was found
            msg = (
                " <b>ANALISI CIRCONDARIO - Nessun Rischio</b>\n"
                f"📍 Analizzate {len(LOCALITA_CIRCONDARIO)} località\n"
                f"🕒 Finestra: prossime {HOURS_AHEAD} ore\n"
                "<i>Nessun temporale severo previsto nel circondario.</i>"
            )
            telegram_send_html(msg)
            LOGGER.info("Messaggio debug inviato (nessun rischio)")
    # All-clear when the alert was active and no risk remains
    if was_alert_active and not locations_with_risk:
        msg = (
            "🟢 <b>ALLERTA TEMPORALI SEVERI - RIENTRATA</b>\n"
            "<i>Condizioni rientrate sotto le soglie di guardia per tutte le località del circondario.</i>"
        )
        telegram_send_html(msg)
        LOGGER.info("All-clear inviato")
        state["alert_active"] = False
        state["locations"] = {}
        save_state(state)
    elif not locations_with_risk:
        # No risk and no prior alert: just record the inactive flag.
        state["alert_active"] = False
        save_state(state)
if __name__ == "__main__":
    arg_parser = argparse.ArgumentParser(description="Severe weather alert - Circondario")
    arg_parser.add_argument("--debug", action="store_true",
                            help="Invia messaggi solo all'admin (chat ID: %s)" % TELEGRAM_CHAT_IDS[0])
    args = arg_parser.parse_args()
    if args.debug:
        # Fix: the previous code built a local `chat_ids` list that was never
        # passed anywhere, so --debug still broadcast to every chat. Restrict
        # the shared recipient list in place so telegram_send_html's default
        # target is only the admin (first) chat, matching the help text.
        TELEGRAM_CHAT_IDS[:] = TELEGRAM_CHAT_IDS[:1]
    analyze_all_locations(debug_mode=args.debug)