Add Push Notification

This commit is contained in:
2026-04-22 01:43:09 +02:00
parent 541e6f1ce3
commit 324b066f51
5 changed files with 216 additions and 9 deletions
+83 -2
View File
@@ -8,6 +8,7 @@ import urllib.request
import threading
import time
import logging
from pywebpush import webpush, WebPushException
from logging.handlers import RotatingFileHandler
from flask_socketio import SocketIO, emit
@@ -50,6 +51,9 @@ def init_db():
(id INTEGER PRIMARY KEY AUTOINCREMENT, username TEXT UNIQUE, password_hash TEXT,
role TEXT, allowed_nodes TEXT)''')
c.execute('''CREATE TABLE IF NOT EXISTS push_subscriptions
(id INTEGER PRIMARY KEY AUTOINCREMENT, username TEXT, subscription TEXT UNIQUE)''')
c.execute('''CREATE TABLE IF NOT EXISTS audit_logs
(id INTEGER PRIMARY KEY AUTOINCREMENT, timestamp DATETIME, username TEXT,
client_id TEXT, command TEXT)''')
@@ -117,6 +121,7 @@ app.secret_key = 'ari_fvg_secret_ultra_secure'
client_states = {}
device_configs = {}
client_telemetry = {}
last_notified_errors = {}
device_health = {}
last_seen_reflector = {}
network_mapping = {}
@@ -171,15 +176,28 @@ def on_message(client, userdata, msg):
except Exception as e:
logger.error(f"Errore parsing config JSON: {e}")
# --- GESTIONE STATI SERVIZIO E NODO ---
elif parts[0] == 'servizi':
client_states[cid] = payload
socketio.emit('dati_aggiornati') # <--- WEBSOCKET
# --- GRILLETTO PUSH: STATO NODO ---
if payload.upper() == 'OFFLINE':
if last_notified_errors.get(f"{cid}_NODE") != 'OFFLINE':
broadcast_push_notification(f"💀 NODO OFFLINE: {cid.upper()}", "Disconnesso dal broker.")
last_notified_errors[f"{cid}_NODE"] = 'OFFLINE'
elif payload.upper() == 'ONLINE':
if last_notified_errors.get(f"{cid}_NODE") == 'OFFLINE':
broadcast_push_notification(f"🌤️ NODO ONLINE: {cid.upper()}", "Tornato operativo.")
del last_notified_errors[f"{cid}_NODE"]
if payload.upper() not in ['OFF', 'OFFLINE', '']:
tel = client_telemetry.get(cid, {})
if isinstance(tel, dict) and '🔄' in str(tel.get('ts1', '')):
client_telemetry[cid] = {"ts1": "In attesa...", "ts2": "In attesa...", "alt": ""}
save_cache(client_telemetry)
# --- GESTIONE SALUTE DISPOSITIVI ---
elif parts[0] == 'devices' and len(parts) >= 3 and parts[2] == 'services':
try:
data = json.loads(payload)
@@ -193,8 +211,27 @@ def on_message(client, userdata, msg):
"profiles": data.get("profiles", {"A": "PROFILO A", "B": "PROFILO B"})
}
socketio.emit('dati_aggiornati') # <--- WEBSOCKET
except Exception as e: logger.error(f"Errore parsing health: {e}")
# --- GRILLETTO PUSH: SERVIZI IN ERRORE ---
processes = data.get("processes", {})
for svc_name, svc_status in processes.items():
status_key = f"{cid}_{svc_name}"
s_lower = svc_status.lower()
if s_lower in ["error", "stopped", "failed"]:
if last_notified_errors.get(status_key) != s_lower:
msg_err = f"Servizio {svc_name} KO ({svc_status})"
if s_lower == "error": msg_err += " - Auto-healing fallito! ⚠️"
broadcast_push_notification(f"🚨 ALLARME: {cid.upper()}", msg_err)
last_notified_errors[status_key] = s_lower
elif s_lower == "online" and status_key in last_notified_errors:
broadcast_push_notification(f"✅ RIPRISTINO: {cid.upper()}", f"Servizio {svc_name} tornato ONLINE.")
del last_notified_errors[status_key]
# -----------------------------------------
except Exception as e:
logger.error(f"Errore parsing health: {e}")
# --- GESTIONE DMR GATEWAY ---
elif len(parts) >= 4 and parts[0] == 'data' and parts[2].lower() == 'dmrgateway' and (parts[3].upper().startswith('NETWORK') or parts[3].upper().startswith('DMR NETWORK')):
try:
cid = parts[1].lower()
@@ -221,6 +258,7 @@ def on_message(client, userdata, msg):
except Exception as e:
logger.error(f"Errore parsing DMRGateway per {cid}: {e}")
# --- GESTIONE ALTRI GATEWAY ---
elif parts[0] in ['dmr-gateway', 'nxdn-gateway', 'ysf-gateway', 'p25-gateway', 'dstar-gateway']:
data = json.loads(payload)
proto = "DMR"
@@ -242,6 +280,7 @@ def on_message(client, userdata, msg):
if m: save_to_sqlite(cid, {'source_id': "🌐 " + m, 'destination_id': 'NET'}, protocol=proto)
# --- GESTIONE MMDVM E TRAFFICO ---
elif parts[0] == 'mmdvm':
data = json.loads(payload)
if cid not in active_calls: active_calls[cid] = {}
@@ -304,7 +343,8 @@ def on_message(client, userdata, msg):
client_telemetry[cid]["alt"] = f"{'' if act=='end' else '⚠️'} {name}: {info['src']}"
save_cache(client_telemetry)
if k in active_calls[cid]: del active_calls[cid][k]
except Exception as e: logger.error(f"ERRORE MQTT MSG: {e}")
except Exception as e:
logger.error(f"ERRORE MQTT MSG: {e}")
# --- INIZIALIZZAZIONE CLIENT MQTT ---
mqtt_backend = mqtt_client.Client(mqtt_client.CallbackAPIVersion.VERSION2, "flask_backend")
@@ -625,6 +665,47 @@ def serve_sw():
def serve_icon():
    # Serve the 512px app icon from the application root directory
    # (presumably referenced by the PWA web manifest — TODO confirm).
    return send_from_directory('.', 'icon-512.png')
def broadcast_push_notification(title, body):
    """Send a Web Push notification to every stored subscription.

    Args:
        title: Notification title shown to the user.
        body: Notification body text.

    Stale endpoints (HTTP 410 Gone) are pruned from the database; any
    other per-subscription failure is logged and skipped so one bad
    subscription cannot block delivery to the rest. No-op when the
    'webpush' section is missing from the config.
    """
    wp_config = config.get('webpush')
    if not wp_config:
        return  # push disabled: no VAPID configuration present
    conn = sqlite3.connect(DB_PATH)
    try:
        c = conn.cursor()
        c.execute("SELECT id, subscription FROM push_subscriptions")
        subs = c.fetchall()
        for sub_id, sub_json in subs:
            try:
                webpush(
                    subscription_info=json.loads(sub_json),
                    data=json.dumps({"title": title, "body": body}),
                    vapid_private_key=wp_config['vapid_private_key'],
                    vapid_claims={"sub": wp_config['vapid_claim_email']}
                )
            except WebPushException as ex:
                # 410 Gone: the endpoint is permanently dead — drop it.
                if ex.response is not None and ex.response.status_code == 410:
                    c.execute("DELETE FROM push_subscriptions WHERE id = ?", (sub_id,))
                    conn.commit()
                else:
                    # Previously swallowed silently; log so push failures
                    # (auth errors, payload too large, ...) are visible.
                    logger.error(f"Errore Push (sub {sub_id}): {ex}")
            except Exception as e:
                logger.error(f"Errore generico Push: {e}")
    finally:
        # Release the connection even if the SELECT itself raises.
        conn.close()
@app.route('/api/vapid_public_key')
def get_vapid_key():
    """Return the VAPID public key the browser needs to subscribe to push."""
    wp_conf = config.get('webpush', {})
    public_key = wp_conf.get('vapid_public_key', '')
    return jsonify({"public_key": public_key})
@app.route('/api/subscribe', methods=['POST'])
def subscribe_push():
    """Store the caller's Web Push subscription for the logged-in user.

    Returns:
        JSON {"success": true} on success, or 403 when not logged in.

    The subscription JSON is stored verbatim; the UNIQUE constraint plus
    INSERT OR IGNORE makes re-registering the same endpoint idempotent.
    """
    if not session.get('logged_in'):
        return jsonify({"error": "Unauthorized"}), 403
    sub_data = request.json
    conn = sqlite3.connect(DB_PATH)
    try:
        c = conn.cursor()
        c.execute("INSERT OR IGNORE INTO push_subscriptions (username, subscription) VALUES (?, ?)",
                  (session.get('user'), json.dumps(sub_data)))
        conn.commit()
    finally:
        # Close the handle even if the INSERT raises (was leaked before).
        conn.close()
    return jsonify({"success": True})
if __name__ == '__main__':
    # Background worker (daemon so it dies with the process); presumably
    # keeps ID/callsign data refreshed — TODO confirm against auto_update_ids.
    threading.Thread(target=auto_update_ids, daemon=True).start()
    # Socket.IO server on all interfaces, port 9000; allow_unsafe_werkzeug
    # explicitly permits the Werkzeug dev server in this deployment.
    socketio.run(app, host='0.0.0.0', port=9000, allow_unsafe_werkzeug=True)