#!/usr/bin/env python3
"""
WashOS - Server + Poller v5 (MIELE MOVE + BLOOMEST)
======================================================
- Webserver auf Port 8080
- Bloomest Live-Daten alle 30s  → data.json
- Bloomest Finanzdaten alle 5 Min → stats.json
- Miele Move Maschinendaten alle 60s → miele.json
- Fernsteuerung über /api/ Endpunkte

Starten: python washos-server.py
"""

import threading, time, json, sys, webbrowser, requests, hashlib, os
from datetime import datetime, timezone, timedelta
from http.server import HTTPServer, SimpleHTTPRequestHandler

# Poller-Module liegen im gleichen Verzeichnis wie dieser Server
sys.path.insert(0, os.path.dirname(os.path.abspath(__file__)))

# ── CONFIGURATION ─────────────────────────────────────────────────
BASE_URL       = "https://www.bloomestlaundry.app/bloomest"   # Bloomest backend base URL
ID_LCS         = "CLN0001DEB"   # laundry control system (LCS) identifier
POLL_INTERVAL  = 30       # live data every 30 s
STATS_INTERVAL = 300      # financial data every 5 min
PORT           = 8080     # local web server port
HISTORY_YEARS  = 3        # how many years of history to load
MIELE_ENABLED  = True     # enable the Miele Move poller
SEKO_ENABLED   = True     # enable the Seko dosing-pump poller
WEATHER_ENABLED = True    # enable the Open-Meteo weather poller

# Credentials are populated by load_bloomest_credentials() from settings.json.
USERNAME = ""
PASSWORD = ""
LAUNDRY_ID = 0          # loaded from settings.json
LCS_KEY = "1234"          # LCS password for device control (default "1234")

def load_bloomest_credentials(output_dir):
    """Load Bloomest credentials from settings.json in *output_dir*.

    Must run BEFORE login() is called, since it populates the
    module-level USERNAME / PASSWORD / LAUNDRY_ID / LCS_KEY globals.
    Returns True when a usable username/password pair is available.
    """
    global USERNAME, PASSWORD, LAUNDRY_ID, LCS_KEY
    settings_path = os.path.join(output_dir, "settings.json")
    if not os.path.exists(settings_path):
        return False
    try:
        with open(settings_path, encoding="utf-8") as fh:
            settings = json.load(fh)
        creds = settings.get("bloomest") or {}

        username = creds.get("username")
        if username:
            USERNAME = username
        password = creds.get("password")
        if password:
            PASSWORD = password

        raw_laundry_id = creds.get("laundry_id")
        if raw_laundry_id:
            try:
                LAUNDRY_ID = int(raw_laundry_id)
            except (TypeError, ValueError):
                pass  # keep the previous value on a malformed id

        lcs_key = creds.get("lcs_key")
        if lcs_key:
            LCS_KEY = lcs_key

        return bool(USERNAME and PASSWORD)
    except Exception as e:
        print(f"  [Bloomest] Settings-Load-Fehler: {e}")
        return False


# ── MAPPINGS ──────────────────────────────────────────────────────
# Bloomest numeric codes → internal (German) labels used by the frontend.
STATE = {1:"frei", 2:"in_betrieb", 3:"ausser_betrieb", 4:"reserviert"}
TYPES = {1:"Waschmaschine", 2:"Trockner", 3:"Muenzautomat"}

# Movement translation key → internal payment channel.
PAYMENT_MAP = {
    "ALL_MOVEMENTS.SALE_CREDIT_CARD":  "karte",        # EC/credit card (POS terminal)
    "ALL_MOVEMENTS.SALE_CASHLESS":     "kundenkarte",   # customer card (internal balance)
    "ALL_MOVEMENTS.SALE_CASH":         "bar",            # cash / coins
    "ALL_MOVEMENTS.LOYALTY_CARD_SALE": "kundenkarte",   # customer card (legacy key)
    "ALL_MOVEMENTS.LOYALTY_CARD_LOAD": "aufladung",     # card top-up
    "ALL_MOVEMENTS.SALE_APP":          "app",            # app payment
    "ALL_MOVEMENTS.REFUND":            "rueckerstattung",
    "ALL_MOVEMENTS.OVERPAY":           "overpay",
    "ALL_MOVEMENTS.NORMAL_SALE":       "verkauf",
    "ALL_MOVEMENTS.DISCOUNTED_SALE":   "verkauf",       # discounted sale
    "ALL_MOVEMENTS.RECHARGE_CASH":     "aufladung",     # top-up, cash
    "ALL_MOVEMENTS.RECHARGE_CREDIT_CARD": "aufladung",  # top-up, card
}

# Product translation key → internal product category.
PRODUCT_MAP = {
    "GENERAL.WASH":    "waschen",
    "GENERAL.DRY":     "trocknen",
    "GENERAL.SERVICE": "service",
}

# One shared HTTP session so the Bloomest auth cookie persists across requests.
session = requests.Session()
session.headers.update({
    "User-Agent": "WashOS/3.0",
    "Accept":     "application/json, text/plain, */*",
    "Referer":    f"{BASE_URL}/#/dashboard",
})
script_dir = os.path.dirname(os.path.abspath(__file__))
# All generated JSON files live in <project>/data, next to the script folder.
data_dir = os.path.join(os.path.dirname(script_dir), "data")
os.makedirs(data_dir, exist_ok=True)


# ── PUSH NOTIFICATIONS + DAILY DIGEST (v1.0) ──────────────────────
# VAPID keys are generated on first start and stored in data/vapid_keys.json.
# The browser uses the public key when creating its push subscription.
VAPID_FILE = os.path.join(data_dir, "vapid_keys.json")
PUSH_ENABLED = True  # can be turned off if pywebpush is not installed

# Optional dependency: push support degrades gracefully when missing.
# NOTE: base64 is also imported inside this try-block, so it is only bound
# when the pywebpush import succeeds.
try:
    from pywebpush import webpush, WebPushException
    from py_vapid import Vapid
    import base64
    _PUSH_LIB_AVAILABLE = True
except ImportError:
    _PUSH_LIB_AVAILABLE = False
    print("[PUSH] pywebpush/py_vapid nicht installiert — Push deaktiviert.")
    print("[PUSH] Installation: pip install pywebpush py-vapid")


def get_vapid_keys():
    """Read or create the VAPID key pair.

    Returns a dict {public, private, claims_email, created} with both keys
    base64url-encoded and unpadded, or None when the push libraries are
    unavailable or key generation fails.
    """
    if not _PUSH_LIB_AVAILABLE:
        return None
    if os.path.exists(VAPID_FILE):
        try:
            with open(VAPID_FILE, encoding="utf-8") as f:
                return json.load(f)
        except Exception:
            pass  # unreadable key file → fall through and generate a new pair
    # Generate new keys
    try:
        vapid = Vapid()
        vapid.generate_keys()
        # Store the keys base64-URL-safe
        import ecdsa  # NOTE(review): appears unused in this function — verify
        priv_key = vapid.private_key
        pub_key = vapid.public_key

        # Private key: raw bytes
        priv_raw = priv_key.to_string()
        priv_b64 = base64.urlsafe_b64encode(priv_raw).rstrip(b"=").decode("ascii")

        # Public key: uncompressed point format (65 bytes: 0x04 + 32 X + 32 Y)
        pub_point = pub_key.to_string("uncompressed")
        pub_b64 = base64.urlsafe_b64encode(pub_point).rstrip(b"=").decode("ascii")

        keys = {
            "public": pub_b64,
            "private": priv_b64,
            "claims_email": "mailto:admin@washos.de",
            "created": datetime.now().isoformat(),
        }
        write_json_atomic(VAPID_FILE, keys)
        print(f"[PUSH] Neue VAPID-Keys generiert und gespeichert in {VAPID_FILE}")
        return keys
    except Exception as e:
        print(f"[PUSH] VAPID-Key-Generierung fehlgeschlagen: {e}")
        return None


def get_push_subscriptions():
    """Return the stored push subscriptions as a list; [] on any problem."""
    subs_path = os.path.join(data_dir, "push_subscriptions.json")
    try:
        with open(subs_path, encoding="utf-8") as fh:
            stored = json.load(fh)
    except Exception:
        # Missing, unreadable or invalid file — treat as "no subscriptions".
        return []
    return stored if isinstance(stored, list) else []


def remove_push_subscription(endpoint):
    """Drop the subscription with the given endpoint (e.g. after HTTP 410 Gone)."""
    subs_path = os.path.join(data_dir, "push_subscriptions.json")
    remaining = []
    for entry in get_push_subscriptions():
        if entry.get("subscription", {}).get("endpoint") != endpoint:
            remaining.append(entry)
    write_json_atomic(subs_path, remaining)


def send_push_to_all(title, body, url=None, tag=None):
    """Send a push notification to every registered subscription.

    Returns a (sent_count, failed_count) tuple. Subscriptions answering
    with HTTP 404/410 are removed from the store. No-op (0, 0) when push
    is disabled, no VAPID keys exist or no subscriptions are registered.
    """
    if not _PUSH_LIB_AVAILABLE or not PUSH_ENABLED:
        return (0, 0)
    keys = get_vapid_keys()
    if not keys:
        return (0, 0)
    subs = get_push_subscriptions()
    if not subs:
        return (0, 0)

    # Payload shape expected by the service worker (title/body/url[/tag]).
    payload = {
        "title": title,
        "body": body,
        "url": url or "/",
    }
    if tag:
        payload["tag"] = tag

    sent, failed = 0, 0
    private_key_pem = _vapid_private_to_pem(keys["private"])
    if not private_key_pem:
        return (0, len(subs))

    for entry in subs:
        sub = entry.get("subscription")
        if not sub:
            continue
        try:
            webpush(
                subscription_info=sub,
                data=json.dumps(payload, ensure_ascii=False),
                vapid_private_key=private_key_pem,
                vapid_claims={"sub": keys.get("claims_email", "mailto:admin@washos.de")},
                timeout=10,
            )
            sent += 1
        except WebPushException as e:
            failed += 1
            # On 404/410 (subscription gone): remove the stale entry
            if e.response and e.response.status_code in (404, 410):
                try:
                    remove_push_subscription(sub.get("endpoint"))
                except Exception:
                    pass  # best effort — keep sending to the remaining subscribers
        except Exception as e:
            failed += 1
            print(f"[PUSH] Fehler bei Versand: {e}")
    if sent or failed:
        ts = datetime.now().strftime("%H:%M:%S")
        print(f"[{ts}] PUSH | '{title}' → {sent} ok / {failed} fail")
    return (sent, failed)


def _vapid_private_to_pem(priv_b64):
    """Convert a base64url-encoded raw P-256 private key to PEM (for pywebpush).

    Returns the PEM string, or None when decoding/conversion fails.
    """
    try:
        # Import locally: the module-level `import base64` sits inside the
        # pywebpush try-block and is unbound if that import failed, which
        # would otherwise turn this call into a NameError.
        import base64
        import ecdsa
        # Restore the base64 padding that was stripped when the key was saved.
        priv_b64 += "=" * (-len(priv_b64) % 4)
        priv_raw = base64.urlsafe_b64decode(priv_b64)
        sk = ecdsa.SigningKey.from_string(priv_raw, curve=ecdsa.NIST256p)
        return sk.to_pem().decode("ascii")
    except Exception as e:
        print(f"[PUSH] PEM-Konvertierung fehlgeschlagen: {e}")
        return None


# Daily-digest scheduler: runs in its own thread and periodically checks
# (every 30 s, see digest_scheduler_loop) whether the configured digest
# time has been reached.
_last_digest_date = None  # "YYYY-MM-DD" of the last day a digest was sent

def digest_scheduler_loop():
    """Endless loop: checks every 30 s whether the daily digest is due.

    Re-reads digest_time ("HH:MM") and digest_enabled from settings.json
    on each iteration so changes take effect without a restart. Sends at
    most one digest per calendar day, tracked via _last_digest_date.
    """
    global _last_digest_date
    while True:
        try:
            # Load settings: digest_time ("HH:MM"), digest_enabled (bool)
            settings_file = os.path.join(data_dir, "settings.json")
            digest_time = "08:00"
            digest_enabled = True
            if os.path.exists(settings_file):
                try:
                    with open(settings_file, encoding="utf-8") as f:
                        settings = json.load(f)
                    digest_time = settings.get("digest_time", "08:00")
                    digest_enabled = settings.get("digest_enabled", True)
                except Exception:
                    pass  # unreadable settings → keep the defaults above

            if digest_enabled:
                now = datetime.now()
                today_str = now.strftime("%Y-%m-%d")
                current_hm = now.strftime("%H:%M")
                # Fire exactly when the clock matches and we have not sent today.
                if current_hm == digest_time and _last_digest_date != today_str:
                    try:
                        send_daily_digest()
                        _last_digest_date = today_str
                    except Exception as e:
                        print(f"[DIGEST] Fehler: {e}")
        except Exception as e:
            print(f"[DIGEST-LOOP] {e}")
        time.sleep(30)  # check every 30 s


def send_daily_digest():
    """Build the daily summary and send it as a push notification.

    Collects up to three facts: yesterday's revenue (with week-over-week
    trend), the active-alert count, and the most urgent action item.
    """
    lines = []

    # 1. Yesterday's revenue
    try:
        stats_file = os.path.join(data_dir, "stats.json")
        if os.path.exists(stats_file):
            with open(stats_file, encoding="utf-8") as f:
                stats = json.load(f)
            daily = stats.get("daily_90") or []

            def _rev(entry):
                # fetch_stats() writes daily_90 entries with "total" (EUR).
                # The previous lookup used only "revenue_total" (cents),
                # which is absent from current data and always yielded 0 €;
                # keep it as a legacy fallback.
                if "total" in entry:
                    return entry.get("total") or 0
                return entry.get("revenue_total", 0) / 100

            if len(daily) >= 2:
                gestern_rev = _rev(daily[-2])
                # Compare with the same weekday 7 days earlier
                if len(daily) >= 9:
                    ref_rev = _rev(daily[-9])
                    if ref_rev > 0:
                        change = (gestern_rev - ref_rev) / ref_rev * 100
                        sign = "+" if change >= 0 else ""
                        lines.append(f"Umsatz gestern: {gestern_rev:.0f} € ({sign}{change:.0f}% vs. Vorwoche)")
                    else:
                        lines.append(f"Umsatz gestern: {gestern_rev:.0f} €")
                else:
                    lines.append(f"Umsatz gestern: {gestern_rev:.0f} €")
    except Exception:
        pass

    # 2. Active alerts
    try:
        alerts_active_file = os.path.join(data_dir, "alerts_active.json")
        if os.path.exists(alerts_active_file):
            with open(alerts_active_file, encoding="utf-8") as f:
                active = json.load(f)
            if isinstance(active, list) and len(active) > 0:
                rot = sum(1 for a in active if a.get("severity") == "red")
                gelb = sum(1 for a in active if a.get("severity") == "amber")
                if rot > 0:
                    lines.append(f"⚠ {rot} rote, {gelb} gelbe Alarme aktiv")
                elif gelb > 0:
                    lines.append(f"{gelb} gelbe Alarme aktiv")
                else:
                    lines.append(f"{len(active)} Alarme aktiv")
            else:
                lines.append("✓ Keine aktiven Alarme")
    except Exception:
        pass

    # 3. Top action item (maintenance due / coin box full / boiler critical)
    try:
        action_item = None
        # Maintenance due?
        maint_file = os.path.join(data_dir, "maintenance.json")
        if os.path.exists(maint_file):
            with open(maint_file, encoding="utf-8") as f:
                maint = json.load(f)
            overdue = [lbl for lbl, m in maint.items()
                       if isinstance(m, dict) and m.get("overdue")]
            if overdue:
                action_item = f"Wartung fällig: Maschine {', '.join(overdue[:3])}"
        if not action_item:
            # Boiler critical?
            boiler_file = os.path.join(data_dir, "boiler_log.json")
            if os.path.exists(boiler_file):
                with open(boiler_file, encoding="utf-8") as f:
                    bl = json.load(f)
                if bl.get("stats", {}).get("critical_minutes_24h", 0) > 30:
                    action_item = "Boiler war >30 Min unter 40 °C — Check nötig"
        if action_item:
            lines.append(action_item)
    except Exception:
        pass

    if not lines:
        lines = ["Tageszusammenfassung: alles ruhig."]

    title = "WashOS — Guten Morgen"
    body = "\n".join(lines[:3])  # at most 3 lines, by design

    sent, failed = send_push_to_all(title, body, url="/", tag="daily-digest")
    print(f"[DIGEST] {datetime.now().strftime('%H:%M')} — '{body}' → {sent} ok / {failed} fail")


# Expose the VAPID public key via an HTTP endpoint (the frontend needs it).
def get_vapid_public_key_string():
    """Return the VAPID public key (base64url) for the frontend, or None."""
    keys = get_vapid_keys()
    if not keys:
        return None
    return keys.get("public")


# ── AUTH ──────────────────────────────────────────────────────────
def login() -> bool:
    """Log in to the Bloomest backend with the module-level credentials.

    The password is sent as a SHA-1 hex digest (Bloomest API convention;
    SHA-1 here is the API's wire format, not a security measure). The
    shared session keeps the resulting auth cookie.

    Returns True for any HTTP 200 response — even when the body is not
    JSON or lacks "idUser" — and False on other status codes or errors.
    """
    passwd_hash = hashlib.sha1(PASSWORD.encode()).hexdigest()
    try:
        r = session.post(
            f"{BASE_URL}/login",
            params={"user": USERNAME, "passwd": passwd_hash, "source": "1"},
            timeout=10,
        )
        if r.status_code == 200:
            try:
                d = r.json()
                if d.get("idUser"):
                    print(f"Eingeloggt - User {d['idUser']}, Level {d.get('userLevel')}")
                    return True
            except Exception:
                pass  # non-JSON body — still treat HTTP 200 as success
            return True
        print(f"Login fehlgeschlagen - HTTP {r.status_code}")
        return False
    except Exception as e:
        print(f"Login Fehler: {e}")
        return False


def api_get(path, params=None):
    """GET a Bloomest API path and return the decoded JSON; raises on HTTP errors."""
    url = f"{BASE_URL}/{path}"
    response = session.get(url, params=params, timeout=10)
    response.raise_for_status()
    return response.json()


def api_stats_day(day, debug=False):
    """Fetch all movement transactions for one calendar day.

    PUTs the inclusive day range to /statistics?type=all_movements.
    On HTTP 401 a re-login is attempted and the request retried once.
    Returns the raw transaction list, or [] on any error / non-list body.
    """
    body = {
        'start': day.strftime('%Y-%m-%d 00:00:00'),
        'end':   day.strftime('%Y-%m-%d 23:59:59'),
        'laundry': LAUNDRY_ID,
    }
    try:
        r = session.put(
            BASE_URL + '/statistics?type=all_movements',
            data=json.dumps(body),
            headers={'Content-Type': 'application/json'},
            timeout=30,
        )
        if debug:
            print('  DEBUG ' + str(day) + ': HTTP ' + str(r.status_code) + ' | ' + r.text[:200])
        if r.status_code == 200:
            data = r.json()
            if isinstance(data, list):
                return data
            return []
        if r.status_code == 401:
            # Session expired — re-authenticate and retry exactly once.
            login()
            r2 = session.put(BASE_URL + '/statistics?type=all_movements', data=json.dumps(body), headers={'Content-Type': 'application/json'}, timeout=30)
            if r2.status_code == 200:
                d2 = r2.json()
                return d2 if isinstance(d2, list) else []
        return []
    except Exception as e:
        if debug:
            print('  FEHLER ' + str(day) + ': ' + str(e))
        return []


def diagnose_stats():
    """Console diagnosis of the statistics endpoint using yesterday's data.

    Prints the HTTP status, transaction count and a sample element so
    session or permission problems can be spotted quickly.
    """
    from datetime import date, timedelta
    yesterday = date.today() - timedelta(days=1)
    print('Diagnose Statistik-Endpoint fuer ' + str(yesterday) + '...')
    body = {'start': yesterday.strftime('%Y-%m-%d 00:00:00'), 'end': yesterday.strftime('%Y-%m-%d 23:59:59'), 'laundry': LAUNDRY_ID}
    try:
        r = session.put(BASE_URL + '/statistics?type=all_movements', data=json.dumps(body), headers={'Content-Type': 'application/json'}, timeout=30)
        print('  HTTP ' + str(r.status_code))
        if r.status_code == 200:
            data = r.json()
            if isinstance(data, list):
                print('  ' + str(len(data)) + ' Transaktionen')
                if data:
                    print('  Erstes Element: ' + str(data[0])[:300])
                else:
                    print('  Leeres Array - kein Umsatz gestern oder Session-Problem')
            else:
                print('  PROBLEM kein Array: ' + str(data)[:200])
        else:
            print('  Antwort: ' + r.text[:300])
    except Exception as e:
        print('  FEHLER: ' + str(e))




# ── TRANSACTION ANALYSIS ──────────────────────────────────────────
def classify(tx: dict) -> dict:
    """Normalise one raw Bloomest transaction into a flat dict (EUR values)."""
    translations = tx.get("idMovementDescriptionTranslation") or []
    keys = [entry.get("baseTranslationId", "") for entry in translations]

    # The first key mapping to a concrete payment channel wins.
    payment = "sonstige"
    for key in keys:
        channel = PAYMENT_MAP.get(key)
        if channel in ("karte", "bar", "kundenkarte", "app"):
            payment = channel
            break

    has_sale = any("SALE" in key for key in keys)
    has_refund = any("REFUND" in key for key in keys)
    is_sale = has_sale and not has_refund
    is_refund = has_refund

    product = PRODUCT_MAP.get(tx.get("idProductDescriptionTranslation", ""), "sonstige")
    dev = tx.get("device") or {}
    device_type = dev.get("id_device_type") or 0

    # Fallback: unknown product but known device type → map by machine
    # kind (1 = washing machine, 2 = dryer).
    if product == "sonstige":
        if device_type == 1:
            product = "waschen"
        elif device_type == 2:
            product = "trocknen"

    # Dryer runtime: prefer operationDetails.duration (ISO 8601 string such
    # as PT8301S or PT30M); otherwise derive it from the price
    # (typical tariff: 1 € per 10 minutes).
    dryer_min = None
    if device_type == 2 and is_sale:
        dur = (tx.get("operationDetails") or {}).get("duration")
        if dur:
            import re
            match = re.match(r"PT(?:(\d+)H)?(?:(\d+)M)?(?:(\d+)S)?", str(dur))
            if match:
                hours, minutes, seconds = (int(g or 0) for g in match.groups())
                dryer_min = hours * 60 + minutes + round(seconds / 60)
        if dryer_min is None:
            value_eur = (tx.get("value") or 0) / 100
            if value_eur > 0:
                dryer_min = round(value_eur * 10)

    return {
        "date":         tx.get("date") or "",
        "value_eur":    round((tx.get("value") or 0) / 100, 2),
        "payment":      payment,
        "product":      product,
        "is_sale":      is_sale,
        "is_refund":    is_refund,
        "device_label": dev.get("label") or "?",
        "device_model": dev.get("model") or "?",
        "device_type":  device_type,
        "quantity":     tx.get("quantity") or 1,
        "dryer_min":    dryer_min,
    }


def aggregate(transactions: list) -> dict:
    """Aggregate a list of classified transactions into summary figures.

    Only entries flagged as sales are counted. Returns totals per payment
    channel and product, per-device and per-hour breakdowns, plus a
    dryer-runtime histogram.
    """
    sales = [tx for tx in transactions if tx["is_sale"]]
    channels = ("bar", "karte", "kundenkarte", "app")

    def channel_total(name):
        # Revenue of one payment channel across all sales.
        return sum(tx["value_eur"] for tx in sales if tx["payment"] == name)

    total = sum(tx["value_eur"] for tx in sales)
    bar, karte, kundenkarte, app = (channel_total(c) for c in channels)
    sonstige = sum(tx["value_eur"] for tx in sales if tx["payment"] not in channels)
    waschen = sum(tx["value_eur"] for tx in sales if tx["product"] == "waschen")
    trocknen = sum(tx["value_eur"] for tx in sales if tx["product"] == "trocknen")

    # Per-device accumulation (rounded to cents after every addition).
    by_device = {}
    for tx in sales:
        dev_key = f"{tx['device_label']} {tx['device_model']}"
        slot = by_device.get(dev_key)
        if slot is None:
            slot = by_device[dev_key] = {
                "label": tx["device_label"], "model": tx["device_model"],
                "type": tx["device_type"], "umsatz": 0.0, "zyklen": 0,
                "bar": 0.0, "karte": 0.0, "kundenkarte": 0.0, "app": 0.0,
            }
        slot["umsatz"] = round(slot["umsatz"] + tx["value_eur"], 2)
        slot["zyklen"] += 1
        if tx["payment"] in channels:
            slot[tx["payment"]] = round(slot[tx["payment"]] + tx["value_eur"], 2)
    # Average revenue per cycle for each device.
    for slot in by_device.values():
        cycles = slot["zyklen"]
        slot["avg_per_cycle"] = round(slot["umsatz"] / cycles, 2) if cycles else 0

    # Revenue per hour of day; timestamps with a trailing "Z" are parsed as UTC.
    by_hour = {}
    for tx in sales:
        try:
            hour = datetime.fromisoformat(tx["date"].replace("Z", "+00:00")).hour
        except Exception:
            hour = 0
        by_hour[hour] = round(by_hour.get(hour, 0) + tx["value_eur"], 2)

    # Dryer-runtime histogram, bucketed to 10-minute steps clamped to 10..60.
    dryer_sales = [tx for tx in sales if tx["device_type"] == 2]
    dryer_durations = {}
    for tx in dryer_sales:
        minutes = tx.get("dryer_min")
        if minutes and minutes > 0:
            bucket = max(10, min(60, round(minutes / 10) * 10))
            dryer_durations[bucket] = dryer_durations.get(bucket, 0) + 1

    return {
        "total":        round(total, 2),
        "zyklen":       len(sales),
        "bar":          round(bar, 2),
        "karte":        round(karte, 2),
        "kundenkarte":  round(kundenkarte, 2),
        "app":          round(app, 2),
        "sonstige":     round(sonstige, 2),
        "waschen":      round(waschen, 2),
        "trocknen":     round(trocknen, 2),
        "by_device":    list(by_device.values()),
        "by_hour":      {str(h): v for h, v in sorted(by_hour.items())},
        "by_payment":   {"bar": round(bar, 2), "karte": round(karte, 2), "kundenkarte": round(kundenkarte, 2), "app": round(app, 2), "sonstige": round(sonstige, 2)},
        "avg_per_cycle": round(total / len(sales), 2) if sales else 0,
        "dryer_durations": dryer_durations,
        "dryer_cycles": len(dryer_sales),
        "wm_cycles": len([tx for tx in sales if tx["device_type"] == 1]),
    }


def pct(new_val, old_val):
    """Percentage change from old_val to new_val; None when old_val is falsy."""
    if old_val:
        return round((new_val - old_val) / old_val * 100, 1)
    return None


def sum_days(daily_cache: dict, from_date, to_date) -> dict:
    """Merge the cached transactions of an inclusive date range and aggregate them."""
    merged = []
    one_day = timedelta(days=1)
    current = from_date
    while current <= to_date:
        merged += daily_cache.get(current.isoformat(), [])
        current += one_day
    return aggregate(merged)


# ── LOAD FINANCIAL DATA ───────────────────────────────────────────
# Bumping this invalidates the daily cache embedded in stats.json.
CACHE_VERSION = 3  # v3: dryer_min added in classify()

def fetch_stats(laundry: dict, progress_cb=None) -> dict:
    """Build the complete stats.json payload for one laundry.

    Loads the per-day transaction history from the Bloomest statistics
    endpoint, using the previous stats.json as a cache so only missing
    days (plus today and yesterday) are re-fetched. Then derives
    aggregated periods, trends, chart series and a month-end projection.

    Args:
        laundry: laundry metadata dict; only "description" is read here.
        progress_cb: optional callable(done, total, day) invoked after
            each fetched day; without it, progress is printed every 30 days.

    Returns:
        dict ready to be serialised as stats.json (meta, aggregated,
        trends, daily_90/daily_400, monthly, yearly, weekday averages,
        best/worst days, projection and the raw daily cache).
    """
    today     = datetime.now().date()
    # NOTE(review): .replace(year=...) raises ValueError when today is
    # Feb 29 — presumably accepted; confirm if that matters operationally.
    start_day = today.replace(year=today.year - HISTORY_YEARS)

    # Load day by day (with the cache from the previous stats.json)
    stats_path = os.path.join(data_dir, "stats.json")
    daily_cache = {}

    # Load existing data so only missing days have to be re-queried
    if os.path.exists(stats_path):
        try:
            with open(stats_path, encoding="utf-8") as f:
                old = json.load(f)
            old_version = old.get("meta", {}).get("cache_version", 1)
            if old_version == CACHE_VERSION:
                for entry in old.get("daily_cache", []):
                    daily_cache[entry["date"]] = entry.get("transactions", [])
                print(f"  Cache geladen: {len(daily_cache)} Tage vorhanden (v{CACHE_VERSION})")
            else:
                print(f"  Cache veraltet (v{old_version} → v{CACHE_VERSION}) — wird neu aufgebaut!")
        except Exception:
            pass

    # Determine missing days + ALWAYS reload today and yesterday
    d = start_day
    missing = []
    yesterday = today - timedelta(days=1)
    while d <= today:
        if d.isoformat() not in daily_cache:
            missing.append(d)
        elif d >= yesterday:
            # Always reload today and yesterday (new transactions!)
            missing.append(d)
        d += timedelta(days=1)

    total_days = len(missing)
    if total_days:
        # Distinguish the initial backfill from the routine daily update
        old_days = [d for d in missing if d < yesterday]
        fresh_days = [d for d in missing if d >= yesterday]
        if old_days:
            print(f"  Lade {len(old_days)} fehlende + {len(fresh_days)} aktuelle Tage...")
        else:
            pass  # no log line for normal polling (today/yesterday only)
    else:
        print("  Cache vollständig.")

    for i, day in enumerate(missing):
        txs_raw = api_stats_day(day)
        txs     = [classify(t) for t in txs_raw]
        daily_cache[day.isoformat()] = txs
        if progress_cb:
            progress_cb(i + 1, total_days, day)
        elif total_days > 0 and (i % 30 == 0 or i == total_days - 1):
            print(f"  {i+1}/{total_days} Tage geladen ({day})...")
        time.sleep(0.1)  # be gentle with the API rate limit

    # Compute the aggregated periods
    def agg(from_d, to_d):
        # Aggregate all cached transactions in the inclusive date range.
        return sum_days(daily_cache, from_d, to_d)

    # Previous month: ym1 = last day of the previous month, then its 1st.
    ym1 = (today.replace(day=1) - timedelta(days=1))
    prev_month_start = ym1.replace(day=1)

    # Quarter boundaries: current quarter start and previous quarter range.
    aq_month   = (today.month - 1) // 3 * 3 + 1
    q_start    = today.replace(month=aq_month, day=1)
    pq_start   = (q_start - timedelta(days=1)).replace(day=1)
    pq_start   = pq_start.replace(month=(pq_start.month - 1) // 3 * 3 + 1)
    pq_end     = q_start - timedelta(days=1)

    a = {
        "heute":      agg(today, today),
        "gestern":    agg(today - timedelta(days=1), today - timedelta(days=1)),
        "vorgestern": agg(today - timedelta(days=2), today - timedelta(days=2)),
        "woche":      agg(today - timedelta(days=6), today),
        "vorwoche":   agg(today - timedelta(days=13), today - timedelta(days=7)),
        "monat":      agg(today.replace(day=1), today),
        "vormonat":   agg(prev_month_start, ym1),
        "quartal":    agg(q_start, today),
        "vorquartal": agg(pq_start, pq_end),
        "jahr":       agg(today.replace(month=1, day=1), today),
        "vorjahr":    agg(today.replace(year=today.year-1, month=1, day=1),
                         today.replace(year=today.year-1, month=12, day=31)),
    }

    # Period-over-period percentage trends (None when the base period is 0).
    trends = {
        "tag_vs_gestern":       pct(a["heute"]["total"],    a["gestern"]["total"]),
        "woche_vs_vorwoche":    pct(a["woche"]["total"],    a["vorwoche"]["total"]),
        "monat_vs_vormonat":    pct(a["monat"]["total"],    a["vormonat"]["total"]),
        "quartal_vs_vorquartal":pct(a["quartal"]["total"],  a["vorquartal"]["total"]),
        "jahr_vs_vorjahr":      pct(a["jahr"]["total"],     a["vorjahr"]["total"]),
        "zyklen_tag_vs_gestern":pct(a["heute"]["zyklen"],   a["gestern"]["zyklen"]),
        "zyklen_woche_vs_vor":  pct(a["woche"]["zyklen"],   a["vorwoche"]["zyklen"]),
        "zyklen_monat_vs_vor":  pct(a["monat"]["zyklen"],   a["vormonat"]["zyklen"]),
    }

    # Last 90 days for charts
    daily_90 = []
    for i in range(89, -1, -1):
        d = today - timedelta(days=i)
        txs = daily_cache.get(d.isoformat(), [])
        agg_d = aggregate(txs)
        daily_90.append({
            "date":  d.isoformat(),
            "label": d.strftime("%d.%m"),
            **agg_d,
        })

    # Last 400 days (13 months) for the day-exact year-over-year comparison.
    # Only a minimal field selection to keep the payload small.
    daily_400 = []
    for i in range(399, -1, -1):
        d = today - timedelta(days=i)
        txs = daily_cache.get(d.isoformat(), [])
        agg_d = aggregate(txs)
        daily_400.append({
            "date":   d.isoformat(),
            "label":  d.strftime("%d.%m.%Y"),
            "total":  agg_d.get("total", 0),
            "zyklen": agg_d.get("zyklen", 0),
            "avg_per_cycle": agg_d.get("avg_per_cycle", 0),
        })

    # Monthly aggregation (last 36 months)
    monthly = []
    for i in range(35, -1, -1):
        y = today.year
        m = today.month - i
        while m <= 0: m += 12; y -= 1
        m_start = datetime(y, m, 1).date()
        import calendar
        m_end   = datetime(y, m, calendar.monthrange(y, m)[1]).date()
        if m_end > today: m_end = today
        agg_m = agg(m_start, m_end)
        monthly.append({
            "date":  m_start.isoformat(),
            "label": m_start.strftime("%b %y"),
            **agg_m,
        })

    # Yearly figures
    yearly = []
    for i in range(HISTORY_YEARS, -1, -1):
        y = today.year - i
        y_start = datetime(y, 1, 1).date()
        y_end   = datetime(y, 12, 31).date()
        if y_end > today: y_end = today
        if y_start > today: continue
        agg_y = agg(y_start, y_end)
        yearly.append({
            "year":  y,
            "label": str(y),
            **agg_y,
        })

    # Weekday analysis: average revenue per weekday over active days only
    weekday_stats = {i: {"sum": 0, "cnt": 0} for i in range(7)}
    for entry in daily_90:
        wd = datetime.fromisoformat(entry["date"]).weekday()  # Mon=0, Sun=6
        if entry["total"] > 0:
            weekday_stats[wd]["sum"] += entry["total"]
            weekday_stats[wd]["cnt"] += 1
    weekday_avg = {
        str(i): round(v["sum"] / v["cnt"], 2) if v["cnt"] else 0
        for i, v in weekday_stats.items()
    }

    # Top/flop days (days with revenue only)
    active_days = [d for d in daily_90 if d["total"] > 0]
    best_days   = sorted(active_days, key=lambda x: x["total"], reverse=True)[:5]
    worst_days  = sorted(active_days, key=lambda x: x["total"])[:3]

    # Naive month-end projection: month-to-date scaled to 30 days
    today_day_of_month = today.day
    monthly_proj = (
        round(a["monat"]["total"] / today_day_of_month * 30, 2)
        if today_day_of_month > 0 else 0
    )

    # Serialise the cache (only days within the history window are kept;
    # days without transactions are stored as empty lists)
    cache_list = [
        {"date": date_str, "transactions": txs}
        for date_str, txs in sorted(daily_cache.items())
        if date_str >= start_day.isoformat()
    ]

    return {
        "meta": {
            "last_update":  datetime.now(timezone.utc).isoformat(),
            "location":     laundry.get("description", "Waschsalon"),
            "id_lcs":       ID_LCS,
            "history_from": start_day.isoformat(),
            "history_to":   today.isoformat(),
            "cached_days":  len(daily_cache),
            "cache_version": CACHE_VERSION,
        },
        "aggregated":    a,
        "trends":        trends,
        "daily_90":      daily_90,
        "daily_400":     daily_400,
        "monthly":       monthly,
        "yearly":        yearly,
        "weekday_avg":   weekday_avg,
        "best_days":     best_days,
        "worst_days":    worst_days,
        "monthly_proj":  monthly_proj,
        "daily_cache":   cache_list,
    }


# ── LIVE POLLING ──────────────────────────────────────────────────
def normalize_device(raw):
    """Map a raw Bloomest device record onto the flat dict used by the dashboard."""
    price_list = raw.get("pricesConfiguration") or [{}]
    first_price = price_list[0]
    op_details = raw.get("operationDetails") or {}
    dev_type = raw.get("type", 1)
    dev_state = raw.get("state", 1)

    # Full price matrix (all configured tariffs for the device).
    full_prices = []
    for entry in price_list:
        full_prices.append({
            "cash": entry.get("cash", 0),
            "cashless": entry.get("cashless", 0),
            "discount1": entry.get("discount1", 0),
            "discount2": entry.get("discount2", 0),
            "baseMinutes": entry.get("baseMinutes", 0),
            "quantity": entry.get("quantity", 1),
            "idProduct": entry.get("idProduct"),
            "programId": entry.get("programId", 0),
            "toSend": entry.get("toSend", False),
            "order": entry.get("order", 0),
            "programName": entry.get("programName"),
        })

    return {
        "id": raw.get("idDevice"),
        "serial": raw.get("serialNumber"),
        "model": raw.get("model"),
        "label": raw.get("label"),
        "size": raw.get("size"),
        "type": TYPES.get(dev_type, "Geraet"),
        "type_raw": dev_type,
        "state": STATE.get(dev_state, "unbekannt"),
        "state_raw": dev_state,
        "in_use": raw.get("isInUse", False),
        "in_service": raw.get("isInService", True),
        "time_bought": raw.get("timeBought", 0),
        "remaining_min": op_details.get("remainingMinutes"),
        "program": op_details.get("programName"),
        "temp_wash": op_details.get("temperature"),
        "start_date": op_details.get("startDate"),      # dryers: start timestamp
        "duration_sec": op_details.get("duration"),     # dryers: total duration in seconds
        "price_cash": round(first_price.get("cash", 0) / 100, 2),       # cents → EUR
        "price_cashless": round(first_price.get("cashless", 0) / 100, 2),
        "prices_full": full_prices,
    }


def make_summary(devices):
    """Aggregate per-device states into the dashboard counters (washers vs dryers)."""
    washers = [d for d in devices if d["type_raw"] == 1]
    dryers = [d for d in devices if d["type_raw"] == 2]

    def count_state(group, state_name):
        return sum(1 for d in group if d["state"] == state_name)

    total = len(devices)
    running = count_state(devices, "in_betrieb")
    summary = {
        "total": total,
        "running": running,
        "load_pct": round(running / total * 100) if total else 0,
        "errors": count_state(devices, "ausser_betrieb"),
    }
    # Same set of counters for washing machines ("wm") and dryers ("tr").
    for prefix, group in (("wm", washers), ("tr", dryers)):
        summary[f"{prefix}_total"] = len(group)
        summary[f"{prefix}_free"] = count_state(group, "frei")
        summary[f"{prefix}_running"] = count_state(group, "in_betrieb")
        summary[f"{prefix}_error"] = count_state(group, "ausser_betrieb")
        summary[f"{prefix}_reserved"] = count_state(group, "reserviert")
    return summary


def normalize_automation(raw):
    """Flatten the LCS automation payload (temperatures, 3-phase power, switches)."""
    def switch_state(key):
        value = raw.get(key, {})
        if isinstance(value, dict):
            return {"stato": value.get("stato"), "manuale": value.get("manuale")}
        return {"stato": None, "manuale": None}

    # Per-phase power; None/missing counts as 0 W.
    phases = [raw.get(k) or 0 for k in ("pwrL1", "pwrL2", "pwrL3")]
    result = {
        "temp_raum": raw.get("tamb"),
        "temp_boiler": raw.get("tacq"),
        "strom_l1": phases[0],
        "strom_l2": phases[1],
        "strom_l3": phases[2],
        "strom_total": sum(phases),
        "last_update": raw.get("lastUpdate"),
    }
    # Output key → raw payload key for all switch-style fields.
    for out_key, raw_key in (("tuer", "porta"), ("insegna", "insegna"),
                             ("libero", "libero"), ("wasser", "acqua"),
                             ("pumpe", "pompa"), ("zona1", "zona1"),
                             ("zona2", "zona2"), ("zona3", "zona3")):
        result[out_key] = switch_state(raw_key)
    return result


def write_json_atomic(filepath, data):
    """Write *data* as JSON to *filepath* atomically.

    The payload is serialized to a sibling ``.tmp`` file first and then
    renamed over the target, so concurrent readers never observe a
    half-written file. If serialization or writing fails, the temp file
    is removed (instead of being left behind) and the error re-raised.
    """
    tmp = filepath + ".tmp"
    try:
        with open(tmp, "w", encoding="utf-8") as f:
            json.dump(data, f, ensure_ascii=False, indent=2)
    except Exception:
        # Don't leave a stale/partial .tmp file around on failure.
        try:
            os.remove(tmp)
        except OSError:
            pass
        raise
    try:
        os.replace(tmp, filepath)
    except OSError:
        # Windows: replace can fail while the target file is being read.
        import shutil
        shutil.move(tmp, filepath)


# ── RECHNUNGS-PDF GENERIERUNG (IV-B) ─────────────────────────────────
def generate_invoice_pdf(invoice):
    """Render a professional invoice PDF from the invoice JSON and return its bytes.

    Layout: letterhead (operator/sender), recipient block, invoice metadata
    (number, date, billing period), transaction table, totals (net/VAT/gross)
    and a footer with contact, tax and bank details.

    Parameters
    ----------
    invoice : dict
        Invoice record with keys such as "operator", "customer", "number",
        "generated" (ISO date), "month" ("YYYY-MM"), "transactions",
        "total_net"/"total_mwst"/"total_gross", "mwst_rate" (fraction, e.g.
        0.19) and optionally "storno" (cancelled flag).

    Returns
    -------
    bytes
        The finished PDF document.
    """
    # reportlab is imported lazily so the server runs without it as long as
    # no PDF is requested.
    from reportlab.lib.pagesizes import A4
    from reportlab.lib import colors
    from reportlab.lib.styles import getSampleStyleSheet, ParagraphStyle
    from reportlab.lib.units import cm, mm
    from reportlab.platypus import (
        SimpleDocTemplate, Paragraph, Spacer, Table, TableStyle
    )
    from reportlab.lib.enums import TA_RIGHT
    import io

    buf = io.BytesIO()
    doc = SimpleDocTemplate(
        buf, pagesize=A4,
        leftMargin=2 * cm, rightMargin=2 * cm,
        topMargin=2 * cm, bottomMargin=2 * cm,
    )
    styles = getSampleStyleSheet()

    # Custom styles
    style_title = ParagraphStyle("title", parent=styles["Heading1"], fontSize=20,
                                 textColor=colors.HexColor("#1a1a1a"), spaceAfter=4)
    style_h2 = ParagraphStyle("h2", parent=styles["Heading2"], fontSize=12,
                              textColor=colors.HexColor("#1a1a1a"), spaceAfter=6)
    style_normal = ParagraphStyle("n", parent=styles["Normal"], fontSize=10,
                                  textColor=colors.HexColor("#1a1a1a"), leading=13)
    style_small = ParagraphStyle("s", parent=styles["Normal"], fontSize=9,
                                 textColor=colors.HexColor("#555"), leading=12)
    style_right = ParagraphStyle("r", parent=style_normal, alignment=TA_RIGHT)
    style_foot = ParagraphStyle("f", parent=styles["Normal"], fontSize=8,
                                textColor=colors.HexColor("#666"), leading=11)

    op = invoice.get("operator", {}) or {}
    cust = invoice.get("customer", {}) or {}
    story = []

    # ── Letterhead: sender line ──
    op_line = op.get("firma", "")
    if op.get("strasse"):
        op_line += " · " + op.get("strasse", "")
    if op.get("plz") or op.get("ort"):
        op_line += ", " + (op.get("plz", "") + " " + op.get("ort", "")).strip()
    if op_line:
        story.append(Paragraph(op_line, style_small))
    story.append(Spacer(1, 4 * mm))

    # ── Recipient + invoice metadata (two columns) ──
    # Recipient address
    emp_lines = []
    if cust.get("firma"):
        emp_lines.append("<b>" + str(cust["firma"]) + "</b>")
    if cust.get("name"):
        emp_lines.append(str(cust["name"]))
    if cust.get("strasse"):
        emp_lines.append(str(cust["strasse"]))
    if cust.get("plz") or cust.get("ort"):
        emp_lines.append((cust.get("plz", "") + " " + cust.get("ort", "")).strip())
    if cust.get("land"):
        emp_lines.append(str(cust["land"]))
    if not emp_lines:
        emp_lines.append('<i>Rechnungsadresse nicht hinterlegt</i>')
    emp_html = "<br/>".join(emp_lines)

    # Invoice metadata
    gen_raw = invoice.get("generated", "")
    try:
        gen_date = datetime.fromisoformat(gen_raw).strftime("%d.%m.%Y")
    except Exception:
        gen_date = (gen_raw or "")[:10]
    month_str = invoice.get("month", "")
    try:
        y, m = month_str.split("-")
        m_names = ["", "Januar", "Februar", "März", "April", "Mai", "Juni",
                   "Juli", "August", "September", "Oktober", "November", "Dezember"]
        period_str = f"{m_names[int(m)]} {y}"
    except Exception:
        period_str = month_str

    meta_lines = [
        "<b>Rechnung</b>",
        f"Rechnungsnr.: <b>{invoice.get('number', '')}</b>",
        f"Rechnungsdatum: {gen_date}",
        f"Leistungszeitraum: {period_str}",
    ]
    if cust.get("ust_id"):
        meta_lines.append(f"USt-IdNr.: {cust['ust_id']}")
    if cust.get("kunden_nr"):
        meta_lines.append(f"Kunden-Nr.: {cust['kunden_nr']}")
    meta_html = "<br/>".join(meta_lines)

    header_tbl = Table(
        [[Paragraph(emp_html, style_normal), Paragraph(meta_html, style_normal)]],
        colWidths=[9 * cm, 8 * cm],
    )
    header_tbl.setStyle(TableStyle([
        ("VALIGN", (0, 0), (-1, -1), "TOP"),
        ("LEFTPADDING", (0, 0), (-1, -1), 0),
        ("RIGHTPADDING", (0, 0), (-1, -1), 0),
    ]))
    story.append(header_tbl)
    story.append(Spacer(1, 10 * mm))

    # ── Cancellation notice if the invoice was voided ──
    if invoice.get("storno"):
        story.append(Paragraph(
            '<font color="#dc2626"><b>STORNIERT</b> — Diese Rechnung wurde storniert.</font>',
            style_normal
        ))
        story.append(Spacer(1, 6 * mm))

    # ── Introductory text ──
    salutation = "Sehr geehrte Damen und Herren,"
    if cust.get("anrede") and cust.get("nachname"):
        salutation = f"Sehr geehrte{'r Herr' if cust.get('anrede') == 'Herr' else ' Frau'} {cust['nachname']},"
    story.append(Paragraph(salutation, style_normal))
    story.append(Spacer(1, 3 * mm))
    story.append(Paragraph(
        f"vielen Dank für Ihren Besuch bei <b>{op.get('firma', 'uns')}</b>. "
        f"Für den Abrechnungszeitraum <b>{period_str}</b> stellen wir Ihnen folgende Leistungen in Rechnung:",
        style_normal
    ))
    story.append(Spacer(1, 8 * mm))

    # ── Transaction table ──
    # Only sale transactions (type 9)
    txs = [t for t in (invoice.get("transactions") or []) if t.get("type") == 9]
    if not txs:
        story.append(Paragraph('<i>Keine Transaktionen im Abrechnungszeitraum.</i>', style_normal))
    else:
        tbl_data = [["Datum", "Zeit", "Gerät", "Beschreibung", "Betrag"]]
        for t in txs:
            d = (t.get("date") or "").strip()
            # Format: "2026-04-15 14:32:22"
            date_part, _, time_part = d.partition(" ")
            if date_part:
                try:
                    ddt = datetime.strptime(date_part, "%Y-%m-%d")
                    date_str = ddt.strftime("%d.%m.%Y")
                except Exception:
                    date_str = date_part
            else:
                date_str = ""
            time_str = time_part[:5] if time_part else ""
            dev = t.get("device") or {}
            dev_str = ((dev.get("label") or "") + " " + (dev.get("model") or "")).strip()
            # Description: map Bloomest translation keys to German labels
            descs = t.get("idMovementDescriptionTranslation") or []
            desc_parts = []
            for d_ in descs:
                key = d_.get("baseTranslationId", "") if isinstance(d_, dict) else ""
                key = key.replace("ALL_MOVEMENTS.", "").replace("LOYALTIES.DISCOUNT", "") \
                    .replace("GENERAL.", "")
                trans = {
                    "SALE_CASHLESS": "Kartenzahlung",
                    "SALE_CASH": "Barzahlung",
                    "NORMAL_SALE": "",
                    "DISCOUNTED_SALE": "rabattiert",
                }
                tr = trans.get(key, key)
                if tr:
                    desc_parts.append(tr)
            # Append product description if present
            prod = t.get("idProductDescriptionTranslation") or {}
            if isinstance(prod, dict) and prod.get("baseTranslationId"):
                desc_parts.append(prod["baseTranslationId"])
            desc_str = " · ".join([p for p in desc_parts if p]) or "Selbstbedienung"
            # Amount: Bloomest values are in cents
            val = (t.get("value") or 0) / 100
            tbl_data.append([
                date_str,
                time_str,
                dev_str,
                Paragraph(desc_str, style_small),
                f"{val:.2f} €",
            ])

        tbl = Table(
            tbl_data,
            colWidths=[2.2 * cm, 1.5 * cm, 2.0 * cm, 8.3 * cm, 2.5 * cm],
            repeatRows=1,
        )
        tbl.setStyle(TableStyle([
            ("BACKGROUND", (0, 0), (-1, 0), colors.HexColor("#1a1a1a")),
            ("TEXTCOLOR", (0, 0), (-1, 0), colors.white),
            ("FONTNAME", (0, 0), (-1, 0), "Helvetica-Bold"),
            ("FONTSIZE", (0, 0), (-1, 0), 9),
            ("BOTTOMPADDING", (0, 0), (-1, 0), 6),
            ("TOPPADDING", (0, 0), (-1, 0), 6),
            ("FONTSIZE", (0, 1), (-1, -1), 9),
            ("ALIGN", (4, 0), (4, -1), "RIGHT"),
            ("ALIGN", (0, 0), (3, 0), "LEFT"),
            ("ROWBACKGROUNDS", (0, 1), (-1, -1), [colors.white, colors.HexColor("#f7f7f7")]),
            ("LINEBELOW", (0, -1), (-1, -1), 0.5, colors.HexColor("#cccccc")),
            ("VALIGN", (0, 0), (-1, -1), "TOP"),
            ("TOPPADDING", (0, 1), (-1, -1), 4),
            ("BOTTOMPADDING", (0, 1), (-1, -1), 4),
        ]))
        story.append(tbl)
        story.append(Spacer(1, 4 * mm))

    # ── Totals block ──
    total_net = invoice.get("total_net", 0)
    total_mwst = invoice.get("total_mwst", 0)
    total_gross = invoice.get("total_gross", 0)
    # BUGFIX: the previous `or 0.19` treated a legitimate 0% VAT rate as
    # "missing" and silently printed 19%. Only fall back when the rate is
    # actually absent (None).
    rate = invoice.get("mwst_rate")
    mwst_rate = int(round((0.19 if rate is None else rate) * 100))

    sum_data = [
        ["", "Zwischensumme netto:", f"{total_net:.2f} €"],
        ["", f"zzgl. {mwst_rate}% USt.:", f"{total_mwst:.2f} €"],
        ["", "Gesamtbetrag brutto:", f"{total_gross:.2f} €"],
    ]
    sum_tbl = Table(sum_data, colWidths=[9 * cm, 5 * cm, 2.5 * cm])
    sum_tbl.setStyle(TableStyle([
        ("FONTSIZE", (0, 0), (-1, -1), 10),
        ("ALIGN", (1, 0), (-1, -1), "RIGHT"),
        ("TOPPADDING", (0, 0), (-1, -1), 3),
        ("BOTTOMPADDING", (0, 0), (-1, -1), 3),
        ("FONTNAME", (1, -1), (-1, -1), "Helvetica-Bold"),
        ("FONTSIZE", (1, -1), (-1, -1), 11),
        ("LINEABOVE", (1, -1), (-1, -1), 1, colors.HexColor("#1a1a1a")),
        ("TOPPADDING", (1, -1), (-1, -1), 6),
    ]))
    story.append(sum_tbl)
    story.append(Spacer(1, 10 * mm))

    # ── Payment notice ──
    if cust.get("monthlyInvoice") or txs:
        story.append(Paragraph(
            "<b>Zahlungshinweis:</b> Da alle Leistungen bereits durch das Guthaben Ihrer Treuekarte "
            "direkt am Gerät bezahlt wurden, ist diese Rechnung bereits vollständig beglichen. "
            "Sie dient ausschließlich der Dokumentation für Ihre Unterlagen.",
            style_normal
        ))
    story.append(Spacer(1, 4 * mm))

    # Closing greeting
    story.append(Paragraph("Mit freundlichen Grüßen<br/><br/>" + op.get("firma", ""), style_normal))
    story.append(Spacer(1, 15 * mm))

    # ── Footer: address / contact / tax / bank details, one line each ──
    foot_lines = []
    if op.get("firma"):
        f_line1 = op["firma"]
        if op.get("strasse"):
            f_line1 += " · " + op["strasse"]
        if op.get("plz") or op.get("ort"):
            f_line1 += " · " + (op.get("plz", "") + " " + op.get("ort", "")).strip()
        foot_lines.append(f_line1)
    f_line2_parts = []
    if op.get("telefon"):
        f_line2_parts.append("Tel: " + op["telefon"])
    if op.get("email"):
        f_line2_parts.append(op["email"])
    if op.get("web"):
        f_line2_parts.append(op["web"])
    if f_line2_parts:
        foot_lines.append(" · ".join(f_line2_parts))
    f_line3_parts = []
    if op.get("ust_id"):
        f_line3_parts.append("USt-IdNr: " + op["ust_id"])
    if op.get("steuer_nr"):
        f_line3_parts.append("Steuer-Nr: " + op["steuer_nr"])
    if op.get("hr"):
        f_line3_parts.append(op["hr"])
    if f_line3_parts:
        foot_lines.append(" · ".join(f_line3_parts))
    f_line4_parts = []
    if op.get("bank"):
        f_line4_parts.append(op["bank"])
    if op.get("iban"):
        f_line4_parts.append("IBAN: " + op["iban"])
    if op.get("bic"):
        f_line4_parts.append("BIC: " + op["bic"])
    if f_line4_parts:
        foot_lines.append(" · ".join(f_line4_parts))
    if foot_lines:
        foot_html = "<br/>".join(foot_lines)
        story.append(Paragraph(foot_html, style_foot))

    doc.build(story)
    pdf_bytes = buf.getvalue()
    buf.close()
    return pdf_bytes


# ── POWER LOGGING ─────────────────────────────────────────────────
# Ring buffer: stores all LCS power measurements of the last 30 days
# so that minute/hour/day sums can be converted to watt-hours.
POWER_LOG_FILE = os.path.join(data_dir, "power_log.json")  # persisted ring buffer
POWER_LOG_DAYS = 30  # retention window in days

# ── BOILER LOGGING (v1.0 feature) ─────────────────────────────────
# Ring buffer: stores boiler temperatures of the last 30 days.
# Enables analysis: when does the temperature drop, heating-element blockage etc.
BOILER_LOG_FILE = os.path.join(data_dir, "boiler_log.json")  # persisted ring buffer
BOILER_LOG_DAYS = 30  # retention window in days

def append_boiler_log(iot):
    """Append the current boiler temperature to boiler_log.json (ring buffer).

    Best-effort: any error is logged and swallowed so the poller keeps running.
    """
    try:
        temp = iot.get("temp_boiler")
        if temp is None:
            return

        log = {"entries": []}
        if os.path.exists(BOILER_LOG_FILE):
            with open(BOILER_LOG_FILE, encoding="utf-8") as f:
                log = json.load(f)

        stamp = datetime.now(timezone.utc).isoformat()
        log["entries"].append({
            "ts": stamp,
            "temp_boiler": float(temp),
            "temp_raum": float(iot.get("temp_raum", 0) or 0),
            "strom_total": iot.get("strom_total", 0) or 0,
        })

        # Ring buffer: drop everything older than the retention window
        # (ISO-8601 UTC strings compare chronologically).
        oldest_allowed = (datetime.now(timezone.utc)
                          - timedelta(days=BOILER_LOG_DAYS)).isoformat()
        log["entries"] = [e for e in log["entries"] if e["ts"] >= oldest_allowed]

        log["meta"] = {
            "last_update": stamp,
            "entry_count": len(log["entries"]),
            "retention_days": BOILER_LOG_DAYS,
        }

        # Hourly aggregation for the chart view.
        log["hourly"] = aggregate_boiler_hourly(log["entries"])
        # Key figures: how often below the critical threshold, etc.
        log["stats"] = compute_boiler_stats(log["entries"])

        write_json_atomic(BOILER_LOG_FILE, log)
    except Exception as e:
        print(f"  [Boiler] Log-Fehler: {e}")


def aggregate_boiler_hourly(entries):
    """Aggregate raw boiler samples per hour: average, minimum, maximum."""
    buckets = {}
    for entry in entries:
        bucket_key = entry["ts"][:13]  # YYYY-MM-DDTHH
        buckets.setdefault(bucket_key, []).append(entry.get("temp_boiler", 0))
    result = [
        {
            "hour": key,
            "avg": round(sum(vals) / len(vals), 1),
            "min": round(min(vals), 1),
            "max": round(max(vals), 1),
            "count": len(vals),
        }
        for key, vals in sorted(buckets.items())
        if vals
    ]
    return result[-168:]  # keep the last 7 days (168 hours)


def _parse_hm(s, default_min=0):
    """Parse 'HH:MM' string to minutes since midnight. Fallback default_min on error."""
    try:
        if not s or ':' not in s:
            return default_min
        h, m = s.split(':')[:2]
        return int(h) * 60 + int(m)
    except Exception:
        return default_min


def _load_operating_hours():
    """Read the 'operating_hours' section from settings.json.

    Returns the dict, or None when the file is missing, unreadable or the
    section is not configured.
    """
    path = os.path.join(data_dir, "settings.json")
    if not os.path.exists(path):
        return None
    try:
        with open(path, encoding="utf-8") as fh:
            settings = json.load(fh)
        return settings.get("operating_hours") or None
    except Exception:
        return None


def _is_in_operating_window(dt_utc, oh):
    """Check if a UTC datetime falls within the configured operating window
    (local time, with warmup/cooldown buffers). Returns True if oh not configured
    (graceful fallback — existing installations without config get 24/7 behaviour)."""
    if not oh:
        return True
    # Convert UTC timestamp → local (assume system is in target timezone)
    local_dt = dt_utc.astimezone()
    day_keys = ['sun', 'mon', 'tue', 'wed', 'thu', 'fri', 'sat']
    dk = day_keys[local_dt.weekday() + 1 if local_dt.weekday() < 6 else 0]
    # weekday(): Mon=0..Sun=6 → shift: Mon(0)→'mon'(idx1), Sun(6)→'sun'(idx0)
    wday_map = {0: 'mon', 1: 'tue', 2: 'wed', 3: 'thu', 4: 'fri', 5: 'sat', 6: 'sun'}
    dk = wday_map[local_dt.weekday()]
    day_cfg = oh.get(dk) or {}
    if day_cfg.get("closed"):
        return False
    minutes_now = local_dt.hour * 60 + local_dt.minute
    open_m = _parse_hm(day_cfg.get("open", "06:00"), 360) - int(oh.get("warmup_min", 60))
    close_m = _parse_hm(day_cfg.get("close", "22:00"), 1320) + int(oh.get("cooldown_min", 30))
    return open_m <= minutes_now <= close_m


def compute_boiler_stats(entries):
    """Key figures for the last 24 h: critical undershoots, average, min/max.

    #12 — Only samples inside the configured operating hours are counted,
    because outside of them the pump is off and the sensor reads wrong.
    """
    if not entries:
        return {}
    # Restrict to the last 24 hours.
    since = (datetime.now(timezone.utc) - timedelta(hours=24)).isoformat()
    last24 = [e for e in entries if e["ts"] >= since]
    if not last24:
        return {}

    # Operating-hours filter: keep only samples inside active windows.
    oh = _load_operating_hours()
    active_entries = last24
    operating_filtered = False
    if oh:
        in_window = []
        for sample in last24:
            try:
                ts = datetime.fromisoformat(sample["ts"].replace("Z", "+00:00"))
                if _is_in_operating_window(ts, oh):
                    in_window.append(sample)
            except Exception:
                pass
        # Fallback: if everything was filtered out (e.g. closed for 24 h),
        # use the unfiltered data — otherwise there would be no stats at all.
        if in_window:
            active_entries = in_window
            operating_filtered = True

    temps = [e.get("temp_boiler", 0) for e in active_entries]
    # Thresholds (may become configurable via settings later).
    CRITICAL = 40  # below 40°C → red
    WARN = 50      # below 50°C → yellow
    below_critical = sum(1 for t in temps if t < CRITICAL)
    in_warn_band = sum(1 for t in temps if CRITICAL <= t < WARN)
    return {
        "avg_24h": round(sum(temps) / len(temps), 1),
        "min_24h": round(min(temps), 1),
        "max_24h": round(max(temps), 1),
        "critical_minutes_24h": below_critical,  # assumption: 1 entry ≈ 1 minute
        "warn_minutes_24h": in_warn_band,
        "sample_count_24h": len(temps),
        "threshold_critical": CRITICAL,
        "threshold_warn": WARN,
        "operating_hours_filtered": operating_filtered,
    }


def append_power_log(iot):
    """Append the current LCS power readings to power_log.json (ring buffer).

    Best-effort: any error is logged and swallowed so the poller keeps running.
    """
    try:
        log = {"entries": []}
        if os.path.exists(POWER_LOG_FILE):
            with open(POWER_LOG_FILE, encoding="utf-8") as f:
                log = json.load(f)

        now = datetime.now(timezone.utc)
        log["entries"].append({
            "ts": now.isoformat(),
            "l1": iot.get("strom_l1", 0) or 0,
            "l2": iot.get("strom_l2", 0) or 0,
            "l3": iot.get("strom_l3", 0) or 0,
            "total_w": iot.get("strom_total", 0) or 0,
        })

        # Ring buffer: discard entries older than the retention window
        # (ISO-8601 UTC strings compare chronologically).
        oldest_allowed = (now - timedelta(days=POWER_LOG_DAYS)).isoformat()
        log["entries"] = [e for e in log["entries"] if e["ts"] >= oldest_allowed]
        log["meta"] = {
            "last_update": datetime.now(timezone.utc).isoformat(),
            "entry_count": len(log["entries"]),
            "retention_days": POWER_LOG_DAYS,
        }

        # Hourly/daily aggregations for fast dashboard access.
        log["hourly"] = aggregate_power_hourly(log["entries"])
        log["daily"] = aggregate_power_daily(log["entries"])

        write_json_atomic(POWER_LOG_FILE, log)
    except Exception as e:
        print(f"  [Power] Log-Fehler: {e}")


def aggregate_power_hourly(entries):
    """Group log entries by hour; compute average watts and approximate kWh.

    The kWh figure assumes the samples represent the full hour: average W
    sustained over one hour / 1000. Partial hours are therefore only a rough
    approximation.

    Cleanup: the previous version used `enumerate` without ever using the
    index, and tracked `ts_first`/`ts_last` per bucket without emitting them.
    """
    hours = {}
    for e in entries:
        h = e["ts"][:13]  # YYYY-MM-DDTHH
        bucket = hours.setdefault(h, {"sum_w": 0, "count": 0})
        bucket["sum_w"] += e["total_w"]
        bucket["count"] += 1
    out = []
    for h, d in sorted(hours.items()):
        avg_w = d["sum_w"] / d["count"] if d["count"] else 0
        out.append({
            "hour": h,
            "avg_w": round(avg_w, 1),
            "kwh": round(avg_w / 1000, 3),  # rough: avg W held for 1 h
            "samples": d["count"],
        })
    return out


def aggregate_power_daily(entries):
    """Daily figures: average W, kWh, peak and minimum (non-zero) load."""
    days = {}
    for e in entries:
        key = e["ts"][:10]  # YYYY-MM-DD
        bucket = days.setdefault(
            key, {"sum_w": 0, "count": 0, "max_w": 0, "min_w": 999999}
        )
        watts = e["total_w"]
        bucket["sum_w"] += watts
        bucket["count"] += 1
        bucket["max_w"] = max(bucket["max_w"], watts)
        # Zero readings are ignored for the minimum (device idle/off).
        if 0 < watts < bucket["min_w"]:
            bucket["min_w"] = watts
    result = []
    for day, dd in sorted(days.items()):
        avg_w = dd["sum_w"] / dd["count"] if dd["count"] else 0
        result.append({
            "date": day,
            "avg_w": round(avg_w, 1),
            "max_w": dd["max_w"],
            "min_w": 0 if dd["min_w"] == 999999 else dd["min_w"],  # sentinel → 0
            # kWh per day = average W × 24 h / 1000
            "kwh": round(avg_w * 24 / 1000, 2),
            "samples": dd["count"],
        })
    return result


def write_live(devices_raw, auto_raw, laundry, poll_n):
    """Normalize the raw device/automation payloads, write data.json, log one line."""
    devices = [normalize_device(d) for d in devices_raw]
    payload = {
        "meta": {
            "id_lcs": ID_LCS,
            "location": laundry.get("description", "Waschsalon"),
            "address": laundry.get("address", ""),
            "city": laundry.get("city", ""),
            "last_update": datetime.now(timezone.utc).isoformat(),
            "poll_n": poll_n,
        },
        "summary": make_summary(devices),
        "devices": devices,
        "automation": normalize_automation(auto_raw),
    }
    write_json_atomic(os.path.join(data_dir, "data.json"), payload)

    summary = payload["summary"]
    iot = payload["automation"]
    err = f"  FEHLER:{summary['errors']}" if summary["errors"] else ""
    print(f"[{datetime.now():%H:%M:%S}] #{poll_n:04d} | "
          f"WM {summary['wm_running']}/{summary['wm_total']}  TR {summary['tr_running']}/{summary['tr_total']}  "
          f"Last {summary['load_pct']}%  |  "
          f"Raum {iot['temp_raum']}C  Boiler {iot['temp_boiler']}C  "
          f"Strom {iot['strom_total']}W{err}")


def write_stats(stats):
    """Persist stats.json and print a one-line financial summary."""
    write_json_atomic(os.path.join(data_dir, "stats.json"), stats)
    agg = stats["aggregated"]
    print(f"[{datetime.now():%H:%M:%S}] STATS | "
          f"Heute {agg['heute']['total']:.2f}€  "
          f"Woche {agg['woche']['total']:.2f}€  "
          f"Monat {agg['monat']['total']:.2f}€  "
          f"Jahr {agg['jahr']['total']:.2f}€  "
          f"Cache: {stats['meta']['cached_days']} Tage")


# ── THREADS ───────────────────────────────────────────────────────
def live_poller(laundry):
    """Background thread: poll live device + automation data every POLL_INTERVAL s.

    Writes data.json via write_live() and feeds the power/boiler ring buffers.
    On HTTP 401 the session is re-authenticated; all other errors are logged
    and the loop keeps running.
    """
    poll_n = 0
    while True:
        try:
            devices_raw = api_get("updates",    {"idLCS": ID_LCS})
            auto_raw    = api_get("automation", {"idLCS": ID_LCS})
            poll_n += 1
            write_live(devices_raw, auto_raw, laundry, poll_n)
            # Feed the 30-day power/boiler ring buffers.
            try:
                iot_norm = normalize_automation(auto_raw)
                append_power_log(iot_norm)
                append_boiler_log(iot_norm)
            except Exception as e:
                print(f"  [Power/Boiler] Fehler: {e}")
        except requests.HTTPError as e:
            # BUGFIX: HTTPError.response can be None (e.g. for a manually
            # raised HTTPError); the bare attribute access then raised an
            # AttributeError inside this except clause, which the sibling
            # `except Exception` below does NOT catch — killing the thread.
            if e.response is not None and e.response.status_code == 401:
                login()  # session expired → re-authenticate
            else:
                print(f"HTTP Live: {e}")
        except Exception as e:
            print(f"Fehler Live: {e}")
        time.sleep(POLL_INTERVAL)


def stats_poller(laundry):
    """Background thread: refresh the financial stats every STATS_INTERVAL s."""
    time.sleep(3)  # short initial delay before the first fetch
    while True:
        try:
            write_stats(fetch_stats(laundry))
        except Exception as e:
            print(f"Fehler Stats: {e}")
        time.sleep(STATS_INTERVAL)


class DashboardHandler(SimpleHTTPRequestHandler):
    """Request handler serving the dashboard's static files plus a JSON API.

    Extends SimpleHTTPRequestHandler with PWA-friendly MIME types, cache
    headers tuned for service-worker updates (see end_headers) and a POST
    API proxy for control commands (see do_POST).
    """

    # Additional MIME types for the PWA (data files, manifest, icons, ES modules)
    extensions_map = {
        **SimpleHTTPRequestHandler.extensions_map,
        ".json":        "application/json",
        ".webmanifest": "application/manifest+json",
        ".svg":         "image/svg+xml",
        ".ico":         "image/vnd.microsoft.icon",
        ".js":          "application/javascript",
        ".mjs":         "application/javascript",
    }

    # Silence the default per-request console logging.
    def log_message(self, fmt, *args): pass

    def end_headers(self):
        """Inject Cache-Control headers before finishing the header section.

        Service worker and manifest must always be re-fetched so app updates
        roll out quickly; icons and HTML shell only get a light no-cache,
        since the service worker itself handles long-term caching.
        """
        clean_path = self.path.split("?")[0]
        if clean_path.endswith(("washos-sw.js", "manifest.json")):
            # Never cache: updates to the SW/manifest must reach clients fast.
            self.send_header("Cache-Control", "no-cache, no-store, must-revalidate")
            self.send_header("Service-Worker-Allowed", "/")
        elif clean_path.endswith(".html") or "/icons/" in clean_path:
            self.send_header("Cache-Control", "no-cache")
        super().end_headers()

    def do_POST(self):
        """API-Proxy für Steuerungsbefehle vom Dashboard"""
        if self.path.startswith("/api/"):
            try:
                content_len = int(self.headers.get("Content-Length", 0))
                body = self.rfile.read(content_len) if content_len else b""
                payload = json.loads(body) if body else {}

                action = self.path.split("/api/")[1]
                result = {"ok": False, "error": "Unbekannte Aktion"}

                if action == "seko/debug-flow":
                    # TEMPORÄR: Debug-Endpoint für Seko Flow-Records Analyse.
                    # Holt Rohdaten direkt von Seko ohne Interpretation.
                    # Payload: {"hours_back": 24, "include_raw": true}
                    try:
                        import importlib.util
                        seko_path = os.path.join(os.path.dirname(__file__), "washos-seko.py")
                        spec = importlib.util.spec_from_file_location("washos_seko", seko_path)
                        seko_mod = importlib.util.module_from_spec(spec)
                        spec.loader.exec_module(seko_mod)
                    except Exception as e:
                        result = {"ok": False, "error": f"Seko-Modul laden: {e}"}
                    else:
                        hours_back = payload.get("hours_back", 24)
                        include_raw = payload.get("include_raw", True)

                        try:
                            seko_mod.load_credentials_from_settings(data_dir)
                        except Exception:
                            pass
                        poller = seko_mod.SekoPoller(output_dir=data_dir)
                        if not poller.login():
                            result = {"ok": False, "error": "Seko-Login fehlgeschlagen"}
                        else:
                            now_s = int(time.time())
                            start_s = now_s - hours_back * 3600
                            results = {}
                            for gid in seko_mod.GIDS:
                                req_params = {
                                    "startTime": start_s,
                                    "endTime": now_s,
                                    "washer": seko_mod.WASHER_MAP,
                                    "ownerID": seko_mod.SEKO_OWNER,
                                    "applicationID": seko_mod.SEKO_APP_ID,
                                    "GID": gid,
                                    "installationSiteName": "Waschsalon Nord GmbH",
                                }
                                raw_data = None
                                http_status = None
                                try:
                                    rr = poller.session.post(
                                        f"{seko_mod.SEKO_BASE}/application/ctrl_getflowrecords",
                                        json=req_params, timeout=30,
                                    )
                                    http_status = rr.status_code
                                    if rr.status_code == 200:
                                        raw_data = rr.json()
                                except Exception as e:
                                    raw_data = {"error": str(e)}

                                summary = {"gid": gid, "http": http_status}
                                if raw_data and isinstance(raw_data, dict):
                                    summary["top_keys"] = list(raw_data.keys())
                                    devicedata = raw_data.get("devicedata") or {}
                                    if isinstance(devicedata, dict):
                                        summary["devicedata_keys"] = list(devicedata.keys())
                                        cf = devicedata.get("CHEMICAL_FLOW") or {}
                                        if isinstance(cf, dict):
                                            summary["chemical_flow_keys"] = list(cf.keys())
                                            gid_bucket = cf.get(gid) or {}
                                            if isinstance(gid_bucket, dict):
                                                summary["chemicals_in_gid"] = list(gid_bucket.keys())
                                                chem_details = {}
                                                for ck, cd in gid_bucket.items():
                                                    if isinstance(cd, dict):
                                                        chem_details[ck] = {
                                                            "keys": list(cd.keys()),
                                                            "has_DDQ": "DDQ" in cd,
                                                            "has_WAS": "WAS" in cd,
                                                        }
                                                        was = cd.get("WAS") or {}
                                                        if isinstance(was, dict) and was:
                                                            first_was_key = list(was.keys())[0]
                                                            first_was = was[first_was_key]
                                                            if isinstance(first_was, dict):
                                                                chem_details[ck]["first_WAS_keys"] = list(first_was.keys())
                                                                chs = first_was.get("CHS") or {}
                                                                if isinstance(chs, dict):
                                                                    chem_details[ck]["CHS_keys"] = list(chs.keys())
                                                                    for chk, chv in chs.items():
                                                                        if isinstance(chv, list):
                                                                            chem_details[ck][f"CHS_{chk}_count"] = len(chv)
                                                                            if chv and isinstance(chv[0], dict):
                                                                                chem_details[ck][f"CHS_{chk}_first"] = chv[0]
                                                                                if len(chv) > 1:
                                                                                    chem_details[ck][f"CHS_{chk}_last"] = chv[-1]
                                                        ddq = cd.get("DDQ")
                                                        if isinstance(ddq, list):
                                                            chem_details[ck]["DDQ_count"] = len(ddq)
                                                            if ddq:
                                                                chem_details[ck]["DDQ_first"] = ddq[0]
                                                                chem_details[ck]["DDQ_last"] = ddq[-1]
                                                summary["chemical_details"] = chem_details
                                            all_bucket = cf.get("ALL") or {}
                                            if isinstance(all_bucket, dict):
                                                summary["ALL_bucket_keys"] = list(all_bucket.keys())
                                                # DDQ im ALL-Bucket analysieren
                                                all_details = {}
                                                for ck, cd in all_bucket.items():
                                                    if isinstance(cd, dict):
                                                        all_details[ck] = {"keys": list(cd.keys())}
                                                        ddq = cd.get("DDQ")
                                                        if isinstance(ddq, list):
                                                            all_details[ck]["DDQ_count"] = len(ddq)
                                                            if ddq:
                                                                all_details[ck]["DDQ_first"] = ddq[0]
                                                                all_details[ck]["DDQ_last"] = ddq[-1]
                                                summary["ALL_details"] = all_details
                                results[gid[:8]] = summary

                            out = {"ok": True, "hours_back": hours_back, "results": results}
                            if include_raw and seko_mod.GIDS:
                                try:
                                    first_gid = seko_mod.GIDS[0]
                                    req_params = {
                                        "startTime": start_s,
                                        "endTime": now_s,
                                        "washer": seko_mod.WASHER_MAP,
                                        "ownerID": seko_mod.SEKO_OWNER,
                                        "applicationID": seko_mod.SEKO_APP_ID,
                                        "GID": first_gid,
                                        "installationSiteName": "Waschsalon Nord GmbH",
                                    }
                                    rr2 = poller.session.post(
                                        f"{seko_mod.SEKO_BASE}/application/ctrl_getflowrecords",
                                        json=req_params, timeout=30,
                                    )
                                    out["raw_sample"] = rr2.json() if rr2.status_code == 200 else None
                                except Exception:
                                    pass
                            result = out

                elif action == "automation/toggle":
                    # Schalter umschalten (Tür, Licht, Wasser etc.)
                    # Zwei-Schritt-Prozess: 1) auf manuell setzen 2) schalten
                    device_id = payload.get("id")  # z.B. "porta", "insegna"
                    is_on = payload.get("isOn")     # True/False
                    if device_id and is_on is not None:
                        # Schritt 1: Auf manuell umstellen
                        r1 = session.put(
                            f"{BASE_URL}/automation",
                            params={"idLCS": ID_LCS, "isManual": "true", "idAutomation": device_id},
                            timeout=10,
                        )
                        time.sleep(1)
                        # Schritt 2: Schalten
                        r2 = session.put(
                            f"{BASE_URL}/automation",
                            params={"idLCS": ID_LCS, "isOn": str(is_on).lower(), "idAutomation": device_id},
                            timeout=10,
                        )
                        result = {"ok": r2.status_code == 200, "status": r2.status_code}
                        ts = datetime.now().strftime("%H:%M:%S")
                        state_str = "AN" if is_on else "AUS"
                        print(f"[{ts}] STEUERUNG | {device_id} → {state_str} (Manual:{r1.status_code} Switch:{r2.status_code})")

                elif action == "automation/manual":
                    # Manuell-Modus umschalten
                    device_id = payload.get("id")
                    is_manual = payload.get("isManual")
                    if device_id and is_manual is not None:
                        r = session.put(
                            f"{BASE_URL}/automation",
                            params={"idLCS": ID_LCS, "isManual": str(is_manual).lower(), "idAutomation": device_id},
                            timeout=10,
                        )
                        result = {"ok": r.status_code == 200, "status": r.status_code}
                        ts = datetime.now().strftime("%H:%M:%S")
                        print(f"[{ts}] STEUERUNG | {device_id} Manuell={is_manual} (HTTP {r.status_code})")

                elif action == "reboot":
                    # LCS oder Cashbox neustarten
                    target = payload.get("target", "lcs")  # "lcs" oder "cashbox"
                    if target == "cashbox":
                        r = session.post(
                            f"{BASE_URL}/reboot",
                            params={"laundry": LAUNDRY_ID, "target": "cashbox"},
                            timeout=15,
                        )
                    else:
                        r = session.post(
                            f"{BASE_URL}/reboot",
                            params={"laundry": LAUNDRY_ID},
                            timeout=15,
                        )
                    result = {"ok": r.status_code == 200, "status": r.status_code}
                    ts = datetime.now().strftime("%H:%M:%S")
                    print(f"[{ts}] STEUERUNG | REBOOT {target} (HTTP {r.status_code})")

                elif action == "device/service":
                    # Maschine In Betrieb / Außer Betrieb setzen
                    # API: PUT /updates?idLCS=...&key=... mit komplettem Geräteobjekt
                    serial = payload.get("serial")
                    in_service = payload.get("inService")
                    lcs_key = payload.get("key", LCS_KEY)
                    if serial and in_service is not None:
                        # Aktuelles Geräteobjekt von Bloomest holen
                        devices_raw = api_get("updates", {"idLCS": ID_LCS})
                        dev = next((d for d in devices_raw if d.get("serialNumber") == serial), None)
                        if not dev:
                            result = {"ok": False, "error": f"Gerät {serial} nicht gefunden"}
                        else:
                            dev["isInService"] = in_service
                            r = session.put(
                                f"{BASE_URL}/updates",
                                params={"idLCS": ID_LCS, "key": lcs_key},
                                json=dev,
                                timeout=10,
                            )
                            result = {"ok": r.status_code == 200, "status": r.status_code}
                            ts = datetime.now().strftime("%H:%M:%S")
                            state = "IN BETRIEB" if in_service else "AUSSER BETRIEB"
                            print(f"[{ts}] GERÄTE | {serial} → {state} (HTTP {r.status_code})")

                elif action == "device/release" or action == "device/use":
                    # Maschine freischalten (isInUse=true) oder wieder freigeben (isInUse=false)
                    # Bei Trocknern: timeBought = gekaufte Minuten
                    serial = payload.get("serial")
                    in_use = payload.get("inUse", False)
                    time_bought = payload.get("timeBought", 0)  # Trockner: Minuten
                    lcs_key = payload.get("key", LCS_KEY)
                    if serial:
                        devices_raw = api_get("updates", {"idLCS": ID_LCS})
                        dev = next((d for d in devices_raw if d.get("serialNumber") == serial), None)
                        if not dev:
                            result = {"ok": False, "error": f"Gerät {serial} nicht gefunden"}
                        else:
                            dev["isInUse"] = in_use
                            if time_bought > 0:
                                dev["timeBought"] = time_bought
                            elif not in_use:
                                dev["timeBought"] = 0
                            r = session.put(
                                f"{BASE_URL}/updates",
                                params={"idLCS": ID_LCS, "key": lcs_key},
                                json=dev,
                                timeout=10,
                            )
                            result = {"ok": r.status_code == 200, "status": r.status_code}
                            ts = datetime.now().strftime("%H:%M:%S")
                            mins_str = f" ({time_bought} Min.)" if time_bought > 0 else ""
                            state = f"FREIGESCHALTET{mins_str}" if in_use else "FREIGEGEBEN"
                            print(f"[{ts}] GERÄTE | {serial} {state} (HTTP {r.status_code})")

                elif action == "device/prices":
                    # Preise für ein Gerät ändern
                    # API: PUT /laundries mit einzelnem Geräteobjekt (NICHT /updates!)
                    # toSend NICHT auf true setzen — Bloomest setzt es selbst
                    serial = payload.get("serial")
                    prices = payload.get("prices")  # pricesConfiguration Array
                    if serial and prices:
                        devices_raw = api_get("updates", {"idLCS": ID_LCS})
                        dev = next((d for d in devices_raw if d.get("serialNumber") == serial), None)
                        if not dev:
                            result = {"ok": False, "error": f"Gerät {serial} nicht gefunden"}
                        else:
                            # Alte Preise für Historie merken
                            old_prices = dev.get("pricesConfiguration") or []
                            dev_label = dev.get("label", "?")
                            dev_model = dev.get("model", "?")

                            dev["pricesConfiguration"] = prices
                            r = session.put(
                                f"{BASE_URL}/laundries",
                                json=dev,
                                timeout=10,
                            )
                            ok = r.status_code == 200
                            result = {"ok": ok, "status": r.status_code}
                            ts = datetime.now().strftime("%H:%M:%S")
                            print(f"[{ts}] PREISE | {serial} ({len(prices)} Slots) (HTTP {r.status_code})")

                            # Historie nur bei Erfolg fortschreiben (PR-A)
                            if ok:
                                try:
                                    ph_file = os.path.join(data_dir, "price_history.json")
                                    history = []
                                    if os.path.exists(ph_file):
                                        try:
                                            with open(ph_file, encoding="utf-8") as f:
                                                history = json.load(f)
                                            if not isinstance(history, list):
                                                history = []
                                        except Exception:
                                            history = []
                                    # Diffs zwischen alten und neuen Preisen ermitteln
                                    changes = []
                                    for i, new_p in enumerate(prices):
                                        old_p = old_prices[i] if i < len(old_prices) else {}
                                        for field in ("cash", "cashless", "discount1", "discount2"):
                                            ov = old_p.get(field, 0) or 0
                                            nv = new_p.get(field, 0) or 0
                                            if ov != nv:
                                                changes.append({
                                                    "slot": i,
                                                    "baseMinutes": new_p.get("baseMinutes"),
                                                    "field": field,
                                                    "old": ov,
                                                    "new": nv,
                                                })
                                    if changes:
                                        history.append({
                                            "ts": datetime.now().isoformat(),
                                            "serial": serial,
                                            "label": dev_label,
                                            "model": dev_model,
                                            "changes": changes,
                                        })
                                        # Letzte 200 Einträge behalten
                                        history = history[-200:]
                                        write_json_atomic(ph_file, history)
                                except Exception as e:
                                    print(f"  WARN: Preis-Historie konnte nicht geschrieben werden: {e}")

                elif action == "prices/history":
                    # Preis-Historie laden (PR-A)
                    ph_file = os.path.join(data_dir, "price_history.json")
                    if os.path.exists(ph_file):
                        try:
                            with open(ph_file, encoding="utf-8") as f:
                                history = json.load(f)
                            result = {"ok": True, "history": history}
                        except Exception as e:
                            result = {"ok": False, "error": str(e)}
                    else:
                        result = {"ok": True, "history": []}

                elif action == "prices/history/add":
                    # #11: Manuelle rückwirkende Preis-Historie-Einträge
                    # Payload: { date: "YYYY-MM-DD", label: "...", model: "...",
                    #            field: "cash|cashless|discount1|discount2",
                    #            old_eur: 2.50, new_eur: 3.00, note: "..." }
                    ph_file = os.path.join(data_dir, "price_history.json")
                    try:
                        date_str = payload.get("date")
                        field = payload.get("field")
                        old_eur = payload.get("old_eur")
                        new_eur = payload.get("new_eur")
                        if not date_str or not field or old_eur is None or new_eur is None:
                            result = {"ok": False, "error": "Datum, Feld, Preis alt und Preis neu sind Pflichtfelder"}
                        else:
                            history = []
                            if os.path.exists(ph_file):
                                try:
                                    with open(ph_file, encoding="utf-8") as f:
                                        history = json.load(f)
                                    if not isinstance(history, list):
                                        history = []
                                except Exception:
                                    history = []
                            # Zeitstempel: Datum + 12:00 Uhr (Platzhalter)
                            try:
                                ts = datetime.fromisoformat(date_str + "T12:00:00").isoformat()
                            except Exception:
                                ts = date_str + "T12:00:00"
                            entry = {
                                "ts": ts,
                                "label": payload.get("label") or "(alle)",
                                "model": payload.get("model") or "",
                                "manual": True,
                                "note": payload.get("note") or "",
                                "changes": [{
                                    "field": field,
                                    "old": int(round(float(old_eur) * 100)),
                                    "new": int(round(float(new_eur) * 100)),
                                }],
                            }
                            history.append(entry)
                            # Nach Datum sortieren (älteste zuerst)
                            history.sort(key=lambda e: e.get("ts", ""))
                            history = history[-500:]  # max. 500 Einträge
                            write_json_atomic(ph_file, history)
                            result = {"ok": True, "entry": entry}
                            ts2 = datetime.now().strftime("%H:%M:%S")
                            print(f"[{ts2}] PRICE-HIST | manuell: {date_str} {entry['label']} {field} {old_eur}→{new_eur}€")
                    except Exception as e:
                        result = {"ok": False, "error": str(e)}

                elif action == "refresh":
                    # Sofort Live-Daten neu laden
                    try:
                        devices_raw = api_get("updates", {"idLCS": ID_LCS})
                        auto_raw = api_get("automation", {"idLCS": ID_LCS})
                        result = {"ok": True, "devices": len(devices_raw)}
                    except Exception as e:
                        result = {"ok": False, "error": str(e)}

                elif action == "inventory/save":
                    # Lagerbestand speichern + Historie fortschreiben (WM-C)
                    inv_file = os.path.join(data_dir, "waschmittel-bestand.json")
                    hist_file = os.path.join(data_dir, "inventory_history.json")
                    try:
                        # Alten Stand laden für Diff
                        old = {"chemicals": {}}
                        if os.path.exists(inv_file):
                            try:
                                with open(inv_file, "r", encoding="utf-8") as f:
                                    old = json.load(f)
                            except Exception:
                                old = {"chemicals": {}}

                        new_chems = payload.get("chemicals", {}) or {}
                        old_chems = old.get("chemicals", {}) or {}

                        # Historie laden
                        history = []
                        if os.path.exists(hist_file):
                            try:
                                with open(hist_file, "r", encoding="utf-8") as f:
                                    history = json.load(f)
                                if not isinstance(history, list):
                                    history = []
                            except Exception:
                                history = []

                        # Diff pro Chemikalie
                        now_iso = datetime.now().isoformat()
                        for chem, entry in new_chems.items():
                            new_kanister = entry.get("kanister", 0)
                            old_kanister = (old_chems.get(chem) or {}).get("kanister", 0)
                            if new_kanister != old_kanister:
                                history.append({
                                    "ts": now_iso,
                                    "chem": chem,
                                    "from_kanister": old_kanister,
                                    "to_kanister": new_kanister,
                                    "delta": new_kanister - old_kanister,
                                })
                        # Auf 500 Einträge begrenzen
                        history = history[-500:]
                        write_json_atomic(hist_file, history)

                        write_json_atomic(inv_file, payload)
                        result = {"ok": True}
                        ts = datetime.now().strftime("%H:%M:%S")
                        print(f"[{ts}] INVENTAR | Bestand gespeichert ({len(new_chems)} Chemikalien)")
                    except Exception as e:
                        result = {"ok": False, "error": str(e)}

                elif action == "inventory/load":
                    # Lagerbestand laden
                    inv_file = os.path.join(data_dir, "waschmittel-bestand.json")
                    if os.path.exists(inv_file):
                        with open(inv_file, "r", encoding="utf-8") as f:
                            result = {"ok": True, "data": json.load(f)}
                    else:
                        result = {"ok": True, "data": None}

                elif action == "inventory/history":
                    # Bestandsänderungs-Historie (WM-C)
                    hist_file = os.path.join(data_dir, "inventory_history.json")
                    if os.path.exists(hist_file):
                        try:
                            with open(hist_file, "r", encoding="utf-8") as f:
                                result = {"ok": True, "history": json.load(f)}
                        except Exception as e:
                            result = {"ok": False, "error": str(e), "history": []}
                    else:
                        result = {"ok": True, "history": []}

                elif action == "maintenance/reset":
                    # Wartungs-Reset: aktuellen Stand der Betriebsstunden
                    # als neuen Nullpunkt für diese Maschine speichern
                    # Optional: custom_date + custom_hours für rückwirkende Einträge
                    # Optional (WE-C): costs — Wartungskosten in €
                    machine = payload.get("machine")
                    current_h = payload.get("current_hours", 0)
                    note = payload.get("note", "")
                    custom_date = payload.get("custom_date")  # ISO z.B. "2026-01-15"
                    costs = payload.get("costs")  # None oder float
                    if machine:
                        m_file = os.path.join(data_dir, "maintenance.json")
                        if os.path.exists(m_file):
                            with open(m_file, encoding="utf-8") as f:
                                mdata = json.load(f)
                        else:
                            mdata = {"machines": {}, "history": []}

                        # Timestamp: custom_date oder "jetzt"
                        if custom_date:
                            try:
                                # Datum auf Mittag UTC setzen
                                dt = datetime.fromisoformat(custom_date).replace(hour=12, minute=0, tzinfo=timezone.utc)
                                entry_ts = dt.isoformat()
                            except Exception:
                                entry_ts = datetime.now(timezone.utc).isoformat()
                        else:
                            entry_ts = datetime.now(timezone.utc).isoformat()

                        mdata["machines"][machine] = {
                            "last_reset_hours": current_h,
                            "last_reset_at": entry_ts,
                            "note": note,
                            "last_costs": costs,
                        }
                        mdata["history"].append({
                            "machine": machine,
                            "at": entry_ts,
                            "hours_at_reset": current_h,
                            "note": note,
                            "retroactive": bool(custom_date),
                            "costs": costs,
                        })
                        # Historie nach Datum sortieren (damit 'letzte' immer stimmt)
                        mdata["history"].sort(key=lambda h: h.get("at", ""))
                        # Für jede Maschine: das machines-Dict auf den neuesten Eintrag setzen
                        for m in mdata["machines"].keys():
                            hist_m = [h for h in mdata["history"] if h.get("machine") == m]
                            if hist_m:
                                last = hist_m[-1]
                                mdata["machines"][m] = {
                                    "last_reset_hours": last["hours_at_reset"],
                                    "last_reset_at": last["at"],
                                    "note": last.get("note", ""),
                                    "last_costs": last.get("costs"),
                                }
                        write_json_atomic(m_file, mdata)
                        result = {"ok": True}
                        ts = datetime.now().strftime("%H:%M:%S")
                        tag = " (rückwirkend)" if custom_date else ""
                        costs_tag = f" · {costs} €" if costs is not None else ""
                        print(f"[{ts}] WARTUNG | {machine}: Reset bei {current_h}h{tag}{costs_tag}")

                elif action == "maintenance/load":
                    # Wartungs-Historie laden
                    m_file = os.path.join(data_dir, "maintenance.json")
                    if os.path.exists(m_file):
                        with open(m_file, encoding="utf-8") as f:
                            result = {"ok": True, "data": json.load(f)}
                    else:
                        result = {"ok": True, "data": {"machines": {}, "history": []}}

                elif action == "maintenance/undo":
                    # Letzten Reset einer Maschine rückgängig
                    machine = payload.get("machine")
                    if machine:
                        m_file = os.path.join(data_dir, "maintenance.json")
                        if os.path.exists(m_file):
                            with open(m_file, encoding="utf-8") as f:
                                mdata = json.load(f)
                            # Letzten Eintrag dieser Maschine aus history entfernen
                            mdata["history"] = [h for h in mdata.get("history", [])
                                if not (h.get("machine") == machine and
                                        h.get("at") == mdata.get("machines", {}).get(machine, {}).get("last_reset_at"))]
                            # Vorherigen Reset als aktuell setzen (oder löschen)
                            prev = [h for h in mdata["history"] if h.get("machine") == machine]
                            if prev:
                                last = prev[-1]
                                mdata["machines"][machine] = {
                                    "last_reset_hours": last["hours_at_reset"],
                                    "last_reset_at": last["at"],
                                    "note": last.get("note", ""),
                                }
                            else:
                                mdata["machines"].pop(machine, None)
                            write_json_atomic(m_file, mdata)
                            result = {"ok": True}
                            ts = datetime.now().strftime("%H:%M:%S")
                            print(f"[{ts}] WARTUNG | {machine}: Reset rückgängig")

                elif action == "settings/load":
                    # Tarife + Waschmittel-Preise laden (ST-A)
                    settings_file = os.path.join(data_dir, "settings.json")
                    if os.path.exists(settings_file):
                        try:
                            with open(settings_file, encoding="utf-8") as f:
                                raw = json.load(f)
                            # Sensible Felder maskieren (nie Passwörter an den Browser zurückgeben)
                            def sanitize_creds(block):
                                """Return a copy of a credentials dict without the
                                password, plus a boolean ``password_set`` flag.
                                Non-dict inputs pass through unchanged."""
                                if not isinstance(block, dict):
                                    return block
                                safe = dict(block)
                                had_password = bool(safe.pop("password", None))
                                safe["password_set"] = had_password
                                return safe
                            if isinstance(raw.get("miele"), dict):
                                raw["miele"] = sanitize_creds(raw["miele"])
                            if isinstance(raw.get("seko"), dict):
                                raw["seko"] = sanitize_creds(raw["seko"])
                            if isinstance(raw.get("bloomest"), dict):
                                raw["bloomest"] = sanitize_creds(raw["bloomest"])
                            result = {"ok": True, "data": raw}
                        except Exception as e:
                            result = {"ok": False, "error": str(e)}
                    else:
                        # Defaults
                        result = {"ok": True, "data": {
                            "tariffs": {
                                "strom": 0.32,
                                "wasser": 3.20,
                                "abwasser": 2.80,
                                "warmwasser_aufschlag": 0.0,
                            },
                            "chemicals": {},
                            "tariff_history": [],
                        }}

                elif action == "settings/save":
                    # Settings speichern. Wenn tariffs sich ändern: in Historie schreiben (ST-C)
                    settings_file = os.path.join(data_dir, "settings.json")
                    try:
                        new_data = payload.get("data") or {}
                        # Bestehende Daten laden
                        old_data = {"tariffs": {}, "chemicals": {}, "tariff_history": []}
                        if os.path.exists(settings_file):
                            try:
                                with open(settings_file, encoding="utf-8") as f:
                                    old_data = json.load(f)
                            except Exception:
                                pass

                        # Credentials: wenn Browser Passwort nicht mitschickt → altes erhalten.
                        # Der Browser sanitized die eigenen Kopien (password_set-Flag), die müssen wir
                        # ebenfalls wegwerfen weil das kein persistentes Feld ist.
                        for cred_key in ("miele", "seko", "bloomest"):
                            new_block = new_data.get(cred_key)
                            if not isinstance(new_block, dict):
                                continue
                            old_block = old_data.get(cred_key) or {}
                            # password_set ist nur Ansicht, niemals speichern
                            new_block.pop("password_set", None)
                            # Leeres oder fehlendes Passwort → altes übernehmen
                            if not new_block.get("password") and old_block.get("password"):
                                new_block["password"] = old_block["password"]
                            new_data[cred_key] = new_block

                        # Tarif-Historie fortschreiben wenn sich was ändert
                        old_tariffs = old_data.get("tariffs", {})
                        new_tariffs = new_data.get("tariffs", {})
                        history = old_data.get("tariff_history", []) or []
                        tariff_changed = any(
                            old_tariffs.get(k) != new_tariffs.get(k)
                            for k in ("strom", "wasser", "abwasser", "warmwasser_aufschlag")
                        )
                        if tariff_changed and old_tariffs:
                            # Den alten Stand mit Gültigkeits-Ende versehen
                            history.append({
                                "valid_from": old_data.get("tariffs_valid_from") or datetime(2024, 1, 1).isoformat(),
                                "valid_until": datetime.now().isoformat(),
                                "tariffs": dict(old_tariffs),
                            })
                            history = history[-50:]  # letzte 50 behalten

                        new_data["tariff_history"] = history
                        if tariff_changed or not old_tariffs:
                            new_data["tariffs_valid_from"] = datetime.now().isoformat()
                        elif "tariffs_valid_from" not in new_data:
                            new_data["tariffs_valid_from"] = old_data.get("tariffs_valid_from") or datetime(2024, 1, 1).isoformat()

                        write_json_atomic(settings_file, new_data)
                        result = {"ok": True, "changed": tariff_changed}
                        ts = datetime.now().strftime("%H:%M:%S")
                        tag = " (Tarife geändert)" if tariff_changed else ""
                        print(f"[{ts}] SETTINGS | gespeichert{tag}")
                    except Exception as e:
                        result = {"ok": False, "error": str(e)}

                elif action == "miele/test":
                    # Login-Test: frisch geladene Credentials aus settings.json
                    try:
                        import importlib
                        miele_mod = importlib.import_module("washos-miele")
                        if hasattr(miele_mod, "try_login_with_settings"):
                            ok, msg, devices = miele_mod.try_login_with_settings(data_dir)
                            result = {"ok": ok, "error": None if ok else msg, "devices": devices}
                        else:
                            result = {"ok": False, "error": "Miele-Modul unterstützt keinen Test-Login (washos-miele.py veraltet)"}
                    except ImportError as e:
                        result = {"ok": False, "error": f"washos-miele.py nicht gefunden: {e}"}
                    except Exception as e:
                        result = {"ok": False, "error": str(e)}

                elif action == "seko/test":
                    # Login-Test: frisch geladene Credentials aus settings.json
                    try:
                        import importlib
                        seko_mod = importlib.import_module("washos-seko")
                        if hasattr(seko_mod, "try_login_with_settings"):
                            ok, msg, pumps = seko_mod.try_login_with_settings(data_dir)
                            result = {"ok": ok, "error": None if ok else msg, "pumps": pumps}
                        else:
                            result = {"ok": False, "error": "Seko-Modul unterstützt keinen Test-Login (washos-seko.py veraltet)"}
                    except ImportError as e:
                        result = {"ok": False, "error": f"washos-seko.py nicht gefunden: {e}"}
                    except Exception as e:
                        result = {"ok": False, "error": str(e)}

                elif action == "alerts/acknowledge":
                    # Alarm als "gesehen" markieren (AL-D)
                    # Optional: snooze_until ISO (temporär ausblenden)
                    alert_id = payload.get("id")
                    snooze_until = payload.get("snooze_until")  # ISO oder None
                    note = payload.get("note", "")
                    if alert_id:
                        ack_file = os.path.join(data_dir, "alerts_ack.json")
                        acks = {}
                        if os.path.exists(ack_file):
                            try:
                                with open(ack_file, encoding="utf-8") as f:
                                    acks = json.load(f)
                            except Exception:
                                acks = {}
                        acks[alert_id] = {
                            "acknowledged_at": datetime.now().isoformat(),
                            "snooze_until": snooze_until,
                            "note": note,
                        }
                        # Alte Bestätigungen aufräumen (älter als 30 Tage und keine snooze mehr)
                        cutoff = (datetime.now() - timedelta(days=30)).isoformat()
                        acks = {k: v for k, v in acks.items() if v.get("acknowledged_at", "") > cutoff or v.get("snooze_until")}
                        write_json_atomic(ack_file, acks)
                        result = {"ok": True}
                        ts = datetime.now().strftime("%H:%M:%S")
                        print(f"[{ts}] ALARM | bestätigt: {alert_id[:40]}")
                    else:
                        result = {"ok": False, "error": "id fehlt"}

                elif action == "alerts/unacknowledge":
                    # Bestätigung widerrufen
                    alert_id = payload.get("id")
                    if alert_id:
                        ack_file = os.path.join(data_dir, "alerts_ack.json")
                        if os.path.exists(ack_file):
                            try:
                                with open(ack_file, encoding="utf-8") as f:
                                    acks = json.load(f)
                                acks.pop(alert_id, None)
                                write_json_atomic(ack_file, acks)
                            except Exception:
                                pass
                        result = {"ok": True}
                    else:
                        result = {"ok": False, "error": "id fehlt"}

                elif action == "alerts/ack-list":
                    # Aktuelle Bestätigungen laden (für Frontend-Filter)
                    ack_file = os.path.join(data_dir, "alerts_ack.json")
                    if os.path.exists(ack_file):
                        try:
                            with open(ack_file, encoding="utf-8") as f:
                                result = {"ok": True, "acks": json.load(f)}
                        except Exception as e:
                            result = {"ok": False, "error": str(e), "acks": {}}
                    else:
                        result = {"ok": True, "acks": {}}

                elif action == "alerts/history/log":
                    # Client meldet: diese Alarme sind aktuell aktiv (AL-E)
                    # Server vergleicht mit letztem Snapshot und schreibt
                    # neue/behobene Alarme in die Historie
                    try:
                        active = payload.get("active") or []  # Liste von {id, severity, category, title, detail}
                        log_file = os.path.join(data_dir, "alerts_log.json")
                        active_file = os.path.join(data_dir, "alerts_active.json")

                        # Historie laden
                        log = []
                        if os.path.exists(log_file):
                            try:
                                with open(log_file, encoding="utf-8") as f:
                                    log = json.load(f)
                                if not isinstance(log, list):
                                    log = []
                            except Exception:
                                log = []

                        # Vorherige Aktive laden
                        prev_active = {}
                        if os.path.exists(active_file):
                            try:
                                with open(active_file, encoding="utf-8") as f:
                                    prev_active = json.load(f)
                            except Exception:
                                prev_active = {}

                        now_iso = datetime.now().isoformat()
                        current_ids = {a["id"]: a for a in active if a.get("id")}

                        # Neue Alarme (die jetzt da sind, vorher nicht) → als "started" loggen
                        for aid, a in current_ids.items():
                            if aid not in prev_active:
                                log.append({
                                    "id": aid,
                                    "event": "started",
                                    "ts": now_iso,
                                    "severity": a.get("severity"),
                                    "category": a.get("category"),
                                    "title": a.get("title"),
                                    "detail": a.get("detail"),
                                })
                                # v1.0 Push-Notification bei neuem roten Alarm
                                if _PUSH_LIB_AVAILABLE and a.get("severity") == "red":
                                    try:
                                        push_title = "⚠ WashOS Alarm"
                                        push_body = (a.get("title") or "Neuer Alarm") + "\n" + (a.get("detail") or "")
                                        send_push_to_all(push_title, push_body[:200], url="/", tag="alert-" + str(aid))
                                    except Exception as e:
                                        print(f"[PUSH-ALARM] {e}")

                        # Behobene (die vorher da waren, jetzt nicht mehr) → "resolved"
                        for aid, a in prev_active.items():
                            if aid not in current_ids:
                                log.append({
                                    "id": aid,
                                    "event": "resolved",
                                    "ts": now_iso,
                                    "severity": a.get("severity"),
                                    "category": a.get("category"),
                                    "title": a.get("title"),
                                })

                        # Letzte 500 Events aufbewahren
                        log = log[-500:]
                        write_json_atomic(log_file, log)
                        # Aktuellen Snapshot speichern
                        write_json_atomic(active_file, {aid: {
                            "severity": a.get("severity"),
                            "category": a.get("category"),
                            "title": a.get("title"),
                            "detail": a.get("detail"),
                            "first_seen": prev_active.get(aid, {}).get("first_seen", now_iso),
                        } for aid, a in current_ids.items()})

                        result = {"ok": True, "logged": len(log)}
                    except Exception as e:
                        result = {"ok": False, "error": str(e)}

                elif action == "alerts/history/load":
                    # Alarm-Historie laden
                    log_file = os.path.join(data_dir, "alerts_log.json")
                    if os.path.exists(log_file):
                        try:
                            with open(log_file, encoding="utf-8") as f:
                                result = {"ok": True, "log": json.load(f)}
                        except Exception as e:
                            result = {"ok": False, "error": str(e), "log": []}
                    else:
                        result = {"ok": True, "log": []}

                elif action == "push/public-key":
                    # VAPID Public Key für Browser-Subscription (v1.0)
                    pk = get_vapid_public_key_string()
                    if pk:
                        result = {"ok": True, "public_key": pk, "push_enabled": _PUSH_LIB_AVAILABLE}
                    else:
                        result = {"ok": False, "error": "Push nicht verfügbar (pywebpush fehlt?)", "push_enabled": _PUSH_LIB_AVAILABLE}

                elif action == "push/test":
                    # Test-Push an alle Subscriptions (v1.0)
                    title = payload.get("title", "WashOS — Test")
                    body = payload.get("body", "Das ist ein Test-Push. Wenn du das siehst: alles funktioniert!")
                    sent, failed = send_push_to_all(title, body, url="/", tag="test")
                    result = {"ok": True, "sent": sent, "failed": failed}

                elif action == "push/send-digest-now":
                    # Manuellen Digest-Versand auslösen (v1.0)
                    try:
                        send_daily_digest()
                        result = {"ok": True}
                    except Exception as e:
                        result = {"ok": False, "error": str(e)}

                elif action == "push/subscribe":
                    # Push-Notification-Subscription speichern (AL-H)
                    # Eigentliche Push-Umsetzung kommt serverseitig später —
                    # aktuell nur Speicherung der Subscription vom Browser
                    sub = payload.get("subscription")
                    if sub:
                        push_file = os.path.join(data_dir, "push_subscriptions.json")
                        subs = []
                        if os.path.exists(push_file):
                            try:
                                with open(push_file, encoding="utf-8") as f:
                                    subs = json.load(f)
                                if not isinstance(subs, list):
                                    subs = []
                            except Exception:
                                subs = []
                        # Duplikat-Check über endpoint
                        endpoint = sub.get("endpoint", "")
                        subs = [s for s in subs if s.get("endpoint") != endpoint]
                        subs.append({
                            "subscription": sub,
                            "added_at": datetime.now().isoformat(),
                        })
                        write_json_atomic(push_file, subs)
                        result = {"ok": True, "count": len(subs)}
                        ts = datetime.now().strftime("%H:%M:%S")
                        print(f"[{ts}] PUSH | Subscription gespeichert ({len(subs)} gesamt)")
                    else:
                        result = {"ok": False, "error": "subscription fehlt"}

                elif action == "push/unsubscribe":
                    # Subscription entfernen
                    endpoint = payload.get("endpoint")
                    if endpoint:
                        push_file = os.path.join(data_dir, "push_subscriptions.json")
                        if os.path.exists(push_file):
                            try:
                                with open(push_file, encoding="utf-8") as f:
                                    subs = json.load(f)
                                subs = [s for s in subs if s.get("subscription", {}).get("endpoint") != endpoint]
                                write_json_atomic(push_file, subs)
                            except Exception:
                                pass
                        result = {"ok": True}
                    else:
                        result = {"ok": False, "error": "endpoint fehlt"}

                elif action == "cashbox/reset-banknotes":
                    # Scheinfach wurde geleert → Reset-Zeitstempel speichern
                    # Optional: Ist-Wert mit übergeben (aus User-Eingabe bei Leerung)
                    # Historie wird in data/cashbox_history.json gepflegt
                    bn_file = os.path.join(data_dir, "cashbox_banknotes.json")
                    hist_file = os.path.join(data_dir, "cashbox_history.json")
                    try:
                        reset_at = datetime.now().isoformat()
                        ist_wert = payload.get("ist_wert")  # None oder float
                        geschaetzt = payload.get("geschaetzt")  # None oder float
                        note = payload.get("note", "")

                        # Haupt-Datei: aktuelles Reset-Datum
                        write_json_atomic(bn_file, {"reset_at": reset_at, "note": note})

                        # Historie laden/ergänzen
                        history = []
                        if os.path.exists(hist_file):
                            try:
                                with open(hist_file, encoding="utf-8") as f:
                                    history = json.load(f)
                                if not isinstance(history, list):
                                    history = []
                            except Exception:
                                history = []
                        history.append({
                            "reset_at": reset_at,
                            "ist_wert": ist_wert,
                            "geschaetzt": geschaetzt,
                            "diff": (ist_wert - geschaetzt) if (ist_wert is not None and geschaetzt is not None) else None,
                            "note": note,
                        })
                        # Nur die letzten 50 Einträge behalten
                        history = history[-50:]
                        write_json_atomic(hist_file, history)

                        result = {"ok": True, "reset_at": reset_at}
                        ts = datetime.now().strftime("%H:%M:%S")
                        if ist_wert is not None:
                            print(f"[{ts}] KASSE | Scheinfach geleert: Ist {ist_wert}€ vs Schätz {geschaetzt}€")
                        else:
                            print(f"[{ts}] KASSE | Scheinfach geleert (ohne Ist-Wert)")
                    except Exception as e:
                        result = {"ok": False, "error": str(e)}

                elif action == "cashbox/banknote-history":
                    # Leerungs-Historie laden
                    hist_file = os.path.join(data_dir, "cashbox_history.json")
                    if os.path.exists(hist_file):
                        try:
                            with open(hist_file, encoding="utf-8") as f:
                                history = json.load(f)
                            result = {"ok": True, "history": history}
                        except Exception as e:
                            result = {"ok": False, "error": str(e)}
                    else:
                        result = {"ok": True, "history": []}

                elif action == "cashbox/status":
                    # Münzzähler: Live-Bestand aus Hopper
                    try:
                        now = datetime.now()
                        body_cur = json.dumps({
                            'start': now.strftime(
                                '%Y-%m-%d 00:00:00'),
                            'end': now.strftime(
                                '%Y-%m-%d 23:59:59'),
                            'laundry': LAUNDRY_ID,
                        })
                        r = session.put(
                            BASE_URL + '/statistics'
                            + '?type=summaries_hopper_currency',
                            data=body_cur,
                            headers={
                                'Content-Type':
                                'application/json'
                            },
                            timeout=30,
                        )
                        if r.status_code == 200:
                            result = {
                                "ok": True,
                                "data": r.json()
                            }
                        else:
                            result = {
                                "ok": False,
                                "status": r.status_code
                            }
                    except Exception as e:
                        result = {
                            "ok": False,
                            "error": str(e)
                        }

                elif action == "users/list":
                    # Alle Treuekarten-Nutzer laden
                    try:
                        users_raw = api_get("users")
                        groups_raw = api_get("users", {"type": "groups"})
                        result = {"ok": True, "users": users_raw, "groups": groups_raw}
                        ts = datetime.now().strftime("%H:%M:%S")
                        print(f"[{ts}] NUTZER | {len(users_raw)} Karten geladen")
                    except Exception as e:
                        result = {"ok": False, "error": str(e)}

                elif action == "users/transactions":
                    # Transaktionshistorie für eine Karte
                    id_media = payload.get("idMedia")
                    start = payload.get("start", "2023-01-01 00:00:00")
                    end = payload.get("end")
                    if not end:
                        end = datetime.now().strftime("%Y-%m-%d 23:59:59")
                    if id_media:
                        try:
                            r = session.put(
                                f"{BASE_URL}/statistics",
                                params={"type": "all_movements"},
                                json={"start": start, "end": end, "user": id_media},
                                timeout=30,
                            )
                            if r.status_code == 200:
                                result = {"ok": True, "transactions": r.json()}
                            else:
                                result = {"ok": False, "status": r.status_code}
                            ts = datetime.now().strftime("%H:%M:%S")
                            print(f"[{ts}] NUTZER | Transaktionen für {id_media}: {len(result.get('transactions', []))} Einträge")
                        except Exception as e:
                            result = {"ok": False, "error": str(e)}

                elif action == "users/update":
                    # Karte aktualisieren: Guthaben, sperren, Rabatt
                    # Verifiziert: PUT /users?type=modifyUser&rechargeValue=X&timezone=Europe/Berlin
                    # Body: Kartenobjekt mit sendInsertNotification
                    id_media = payload.get("idMedia")
                    changes = payload.get("changes", {})
                    if id_media and changes:
                        try:
                            users_raw = api_get("users")
                            card = next((u for u in users_raw if u.get("idMedia") == id_media), None)
                            if not card:
                                result = {"ok": False, "error": f"Karte {id_media} nicht gefunden"}
                            else:
                                # Query-Parameter
                                params = {"type": "modifyUser", "timezone": "Europe/Berlin"}

                                # Guthaben: rechargeValue = Differenz in Cent
                                recharge = changes.get("residualCredit")
                                if recharge is not None:
                                    current = card.get("residualCredit", 0)
                                    diff = recharge - current
                                    if diff != 0:
                                        params["rechargeValue"] = diff

                                # Body: exakt die Felder die Bloomest erwartet
                                # WICHTIG: Datumsfelder müssen UTC ISO mit .000Z sein
                                # GET /users liefert lokale Zeit ohne Suffix,
                                # PUT erwartet UTC mit .000Z
                                def to_utc_iso(dt_str):
                                    """Convert a local (Europe/Berlin) ISO timestamp to
                                    UTC with the '.000Z' suffix the PUT endpoint expects.

                                    Uses a month-level DST heuristic (Apr-Oct => +2h CEST,
                                    otherwise +1h CET); strings already ending in 'Z' and
                                    empty values pass through unchanged.
                                    """
                                    if not dt_str:
                                        return dt_str
                                    if dt_str.endswith("Z"):
                                        # Already UTC-marked — leave untouched
                                        return dt_str
                                    try:
                                        local_dt = datetime.fromisoformat(dt_str)
                                        shift = 2 if local_dt.month in range(4, 11) else 1
                                        return (local_dt - timedelta(hours=shift)).strftime("%Y-%m-%dT%H:%M:%S.000Z")
                                    except Exception:
                                        # Best effort: tack the UTC marker onto the raw string
                                        return dt_str + ".000Z" if not dt_str.endswith("Z") else dt_str

                                user_data = card.get("user") or {}
                                if user_data.get("registrationDay"):
                                    user_data = dict(user_data)
                                    user_data["registrationDay"] = to_utc_iso(user_data["registrationDay"])

                                body = {
                                    "idMedia": card.get("idMedia"),
                                    "idUserMedia": card.get("idUserMedia"),
                                    "serialNumber": card.get("serialNumber"),
                                    "alias": changes.get("alias", card.get("alias", "")),
                                    "residualCredit": card.get("residualCredit"),
                                    "discountPercentage": changes.get("discountPercentage", card.get("discountPercentage", 0)),
                                    "priceTable": changes.get("priceTable", card.get("priceTable", 0)),
                                    "lastMovement": to_utc_iso(card.get("lastMovement")),
                                    "blacklisted": changes.get("blacklisted", card.get("blacklisted", False)),
                                    "activation": to_utc_iso(card.get("activation")),
                                    "user": user_data,
                                    "groups": changes.get("groups", card.get("groups", [])),
                                    "sendInsertNotification": False,
                                }

                                r = session.put(
                                    f"{BASE_URL}/users",
                                    params=params,
                                    json=body,
                                    timeout=10,
                                )
                                result = {"ok": r.status_code == 200, "status": r.status_code}
                                ts = datetime.now().strftime("%H:%M:%S")
                                change_str = ", ".join(f"{k}={v}" for k, v in changes.items())
                                print(f"[{ts}] NUTZER | {card.get('serialNumber')} ({id_media}): {change_str} → HTTP {r.status_code}")

                                # Aufladungs-Log schreiben wenn rechargeValue gesetzt
                                if r.status_code == 200 and "rechargeValue" in params:
                                    rch_file = os.path.join(data_dir, "recharges.json")
                                    rch_log = []
                                    if os.path.exists(rch_file):
                                        with open(rch_file, encoding="utf-8") as f:
                                            rch_log = json.load(f)
                                    rch_log.append({
                                        "idMedia": id_media,
                                        "serialNumber": card.get("serialNumber"),
                                        "alias": card.get("alias", ""),
                                        "amount": params["rechargeValue"],
                                        "creditBefore": card.get("residualCredit", 0),
                                        "creditAfter": card.get("residualCredit", 0) + params["rechargeValue"],
                                        "timestamp": datetime.now().isoformat(),
                                        "confirmed": False,
                                    })
                                    write_json_atomic(rch_file, rch_log)
                                    print(f"[{ts}] AUFLADUNG | {card.get('serialNumber')}: {params['rechargeValue']} Cent → recharges.json")
                        except Exception as e:
                            result = {"ok": False, "error": str(e)}

                elif action == "users/recharges":
                    # Aufladungs-Log laden
                    rch_file = os.path.join(data_dir, "recharges.json")
                    if os.path.exists(rch_file):
                        with open(rch_file, encoding="utf-8") as f:
                            result = {"ok": True, "recharges": json.load(f)}
                    else:
                        result = {"ok": True, "recharges": []}

                elif action == "users/recharges/confirm":
                    # Aufladung als bestätigt markieren
                    id_media = payload.get("idMedia")
                    ts_match = payload.get("timestamp")
                    if id_media:
                        rch_file = os.path.join(data_dir, "recharges.json")
                        if os.path.exists(rch_file):
                            with open(rch_file, encoding="utf-8") as f:
                                rch_log = json.load(f)
                            for entry in rch_log:
                                if entry.get("idMedia") == id_media:
                                    if ts_match:
                                        if entry.get("timestamp") == ts_match:
                                            entry["confirmed"] = True
                                    else:
                                        entry["confirmed"] = True
                            write_json_atomic(rch_file, rch_log)
                            result = {"ok": True}
                        else:
                            result = {"ok": True}

                elif action == "users/customer_data/save":
                    # Erweiterte Kundenstammdaten speichern (Firma, Adresse, USt-ID, Rechnungs-Flag)
                    id_media = payload.get("idMedia")
                    cdata = payload.get("data", {})
                    if id_media:
                        cd_file = os.path.join(data_dir, "customer_data.json")
                        all_cd = {}
                        if os.path.exists(cd_file):
                            with open(cd_file, encoding="utf-8") as f:
                                all_cd = json.load(f)
                        all_cd[str(id_media)] = cdata
                        write_json_atomic(cd_file, all_cd)
                        result = {"ok": True}
                        ts = datetime.now().strftime("%H:%M:%S")
                        print(f"[{ts}] KUNDE | {id_media}: Stammdaten gespeichert")

                elif action == "users/customer_data/load":
                    # Alle erweiterten Kundenstammdaten laden
                    cd_file = os.path.join(data_dir, "customer_data.json")
                    if os.path.exists(cd_file):
                        with open(cd_file, encoding="utf-8") as f:
                            result = {"ok": True, "data": json.load(f)}
                    else:
                        result = {"ok": True, "data": {}}

                elif action == "users/invoice":
                    # Rechnung generieren: Transaktionen eines Monats als JSON (IV-A + IV-G)
                    id_media = payload.get("idMedia")
                    month = payload.get("month")  # "2026-04"
                    force = payload.get("force", False)  # IV-G: Duplikat-Override
                    if id_media and month:
                        try:
                            year, mon = month.split("-")
                            start = f"{year}-{mon}-01 00:00:00"
                            import calendar
                            last_day = calendar.monthrange(int(year), int(mon))[1]
                            end = f"{year}-{mon}-{last_day:02d} 23:59:59"

                            # IV-G: Duplikat-Schutz — Prüfe ob Rechnung für Kunde+Monat schon existiert
                            inv_dir = os.path.join(data_dir, "invoices", f"{year}-{mon}")
                            existing_invoice = None
                            if os.path.isdir(inv_dir) and not force:
                                for fname in os.listdir(inv_dir):
                                    if not fname.endswith(".json"):
                                        continue
                                    try:
                                        with open(os.path.join(inv_dir, fname), encoding="utf-8") as f:
                                            ex = json.load(f)
                                        if ex.get("idMedia") == id_media and ex.get("month") == month and not ex.get("storno"):
                                            existing_invoice = ex
                                            break
                                    except Exception:
                                        continue
                            if existing_invoice:
                                result = {
                                    "ok": False,
                                    "duplicate": True,
                                    "existing": {
                                        "number": existing_invoice.get("number"),
                                        "generated": existing_invoice.get("generated"),
                                        "total": existing_invoice.get("total_gross", 0),
                                    },
                                    "message": "Rechnung für diesen Kunden und Monat existiert bereits. 'force=true' übergeben, um Storno zu erstellen und neue Rechnung zu generieren."
                                }
                            else:
                                # Ggf. Storno der existierenden Rechnung (wenn force=true)
                                if force and os.path.isdir(inv_dir):
                                    for fname in os.listdir(inv_dir):
                                        if not fname.endswith(".json"):
                                            continue
                                        try:
                                            fpath = os.path.join(inv_dir, fname)
                                            with open(fpath, encoding="utf-8") as f:
                                                ex = json.load(f)
                                            if ex.get("idMedia") == id_media and ex.get("month") == month and not ex.get("storno"):
                                                ex["storno"] = True
                                                ex["storno_at"] = datetime.now().isoformat()
                                                write_json_atomic(fpath, ex)
                                                print(f"[{datetime.now().strftime('%H:%M:%S')}] STORNO | {ex.get('number')} bei Neu-Generierung")
                                        except Exception:
                                            continue

                                r = session.put(
                                    f"{BASE_URL}/statistics",
                                    params={"type": "all_movements"},
                                    json={"start": start, "end": end, "user": id_media},
                                    timeout=30,
                                )
                                if r.status_code == 200:
                                    txs = r.json()
                                    # Kundenstammdaten laden
                                    cd_file = os.path.join(data_dir, "customer_data.json")
                                    cdata = {}
                                    if os.path.exists(cd_file):
                                        with open(cd_file, encoding="utf-8") as f:
                                            all_cd = json.load(f)
                                        cdata = all_cd.get(str(id_media), {})
                                    # Betreiber-Daten laden
                                    op_file = os.path.join(data_dir, "operator.json")
                                    opdata = {}
                                    if os.path.exists(op_file):
                                        with open(op_file, encoding="utf-8") as f:
                                            opdata = json.load(f)
                                    # Rechnungsnummer pro Salon (Prefix konfigurierbar)
                                    inv_prefix = opdata.get("invoicePrefix", "WOS")
                                    inv_counter_file = os.path.join(data_dir, "invoice_counter.json")
                                    counter = {"last": 0}
                                    if os.path.exists(inv_counter_file):
                                        with open(inv_counter_file, encoding="utf-8") as f:
                                            counter = json.load(f)
                                    counter["last"] += 1
                                    inv_nr = f"{inv_prefix}-{year}{mon}-{counter['last']:04d}"
                                    write_json_atomic(inv_counter_file, counter)

                                    # IV-A: Summen berechnen für KPIs + Archiv
                                    total_gross_cent = 0
                                    for t in txs:
                                        if t.get("type") == 9:  # Verkauf
                                            total_gross_cent += t.get("value", 0)
                                    # Netto/MwSt (19%)
                                    mwst_rate = 0.19
                                    total_gross_eur = total_gross_cent / 100
                                    total_net_eur = round(total_gross_eur / (1 + mwst_rate), 2)
                                    total_mwst_eur = round(total_gross_eur - total_net_eur, 2)

                                    invoice_obj = {
                                        "number": inv_nr,
                                        "idMedia": id_media,
                                        "month": month,
                                        "start": start,
                                        "end": end,
                                        "transactions": txs,
                                        "customer": cdata,
                                        "operator": opdata,
                                        "generated": datetime.now().isoformat(),
                                        "total_gross": total_gross_eur,
                                        "total_net": total_net_eur,
                                        "total_mwst": total_mwst_eur,
                                        "mwst_rate": mwst_rate,
                                        "storno": False,
                                    }

                                    # IV-A: In Archiv speichern
                                    os.makedirs(inv_dir, exist_ok=True)
                                    inv_file = os.path.join(inv_dir, f"{inv_nr}.json")
                                    write_json_atomic(inv_file, invoice_obj)

                                    result = {
                                        "ok": True,
                                        "invoice": invoice_obj,
                                    }
                                    ts = datetime.now().strftime("%H:%M:%S")
                                    print(f"[{ts}] RECHNUNG | {inv_nr}: {len(txs)} Transaktionen für {id_media} ({total_gross_eur:.2f}€)")
                                else:
                                    result = {"ok": False, "status": r.status_code}
                        except Exception as e:
                            import traceback
                            result = {"ok": False, "error": str(e), "trace": traceback.format_exc()}

                elif action == "invoices/list":
                    # IV-F: Archiv-Liste — alle Rechnungen aus data/invoices/YYYY-MM/*.json
                    try:
                        inv_root = os.path.join(data_dir, "invoices")
                        all_invoices = []
                        if os.path.isdir(inv_root):
                            for month_dir in sorted(os.listdir(inv_root), reverse=True):
                                mdir = os.path.join(inv_root, month_dir)
                                if not os.path.isdir(mdir):
                                    continue
                                for fname in sorted(os.listdir(mdir), reverse=True):
                                    if not fname.endswith(".json"):
                                        continue
                                    try:
                                        with open(os.path.join(mdir, fname), encoding="utf-8") as f:
                                            inv = json.load(f)
                                        # Nur Zusammenfassung, keine vollen Transaktionen
                                        cust = inv.get("customer") or {}
                                        all_invoices.append({
                                            "number": inv.get("number"),
                                            "month": inv.get("month"),
                                            "idMedia": inv.get("idMedia"),
                                            "customer_id": cust.get("id") or inv.get("idMedia"),  # #16: für Top-10/DATEV
                                            "customer_name": cust.get("firma") or cust.get("name") or "",
                                            "customer_email": cust.get("email", ""),
                                            "total_gross": inv.get("total_gross", 0),
                                            "total_net": inv.get("total_net", 0),
                                            "total_mwst": inv.get("total_mwst", 0),
                                            "generated": inv.get("generated"),
                                            "date": inv.get("date") or inv.get("generated"),  # #16: DATEV-Datum
                                            "storno": inv.get("storno", False),
                                            "tx_count": len(inv.get("transactions") or []),
                                        })
                                    except Exception as e:
                                        print(f"[INVOICES] Fehler beim Lesen von {fname}: {e}")
                                        continue
                        result = {"ok": True, "invoices": all_invoices}
                    except Exception as e:
                        result = {"ok": False, "error": str(e), "invoices": []}

                elif action == "invoices/get":
                    # IV-F: Einzelne Rechnung laden
                    inv_nr = payload.get("number", "")
                    try:
                        inv_root = os.path.join(data_dir, "invoices")
                        found = None
                        if os.path.isdir(inv_root):
                            for month_dir in os.listdir(inv_root):
                                mdir = os.path.join(inv_root, month_dir)
                                if not os.path.isdir(mdir):
                                    continue
                                inv_file = os.path.join(mdir, f"{inv_nr}.json")
                                if os.path.exists(inv_file):
                                    with open(inv_file, encoding="utf-8") as f:
                                        found = json.load(f)
                                    break
                        if found:
                            result = {"ok": True, "invoice": found}
                        else:
                            result = {"ok": False, "error": "Rechnung nicht gefunden"}
                    except Exception as e:
                        result = {"ok": False, "error": str(e)}

                elif action == "invoices/pdf":
                    # IV-B: PDF-Export per reportlab
                    inv_nr = payload.get("number", "")
                    try:
                        inv_root = os.path.join(data_dir, "invoices")
                        found = None
                        if os.path.isdir(inv_root):
                            for month_dir in os.listdir(inv_root):
                                mdir = os.path.join(inv_root, month_dir)
                                if not os.path.isdir(mdir):
                                    continue
                                inv_file = os.path.join(mdir, f"{inv_nr}.json")
                                if os.path.exists(inv_file):
                                    with open(inv_file, encoding="utf-8") as f:
                                        found = json.load(f)
                                    break
                        if not found:
                            result = {"ok": False, "error": "Rechnung nicht gefunden"}
                        else:
                            pdf_bytes = generate_invoice_pdf(found)
                            import base64
                            pdf_b64 = base64.b64encode(pdf_bytes).decode("ascii")
                            result = {"ok": True, "pdf_base64": pdf_b64, "filename": f"{inv_nr}.pdf"}
                    except Exception as e:
                        import traceback
                        result = {"ok": False, "error": str(e), "trace": traceback.format_exc()}

                elif action == "invoices/zip":
                    # #16: Alle PDFs eines Monats als ZIP für Steuerberater-Export
                    # Antwortet direkt mit binärem ZIP (nicht JSON) — muss explizit behandelt werden.
                    month_key = payload.get("month", "")
                    if not month_key:
                        result = {"ok": False, "error": "Monat fehlt"}
                    else:
                        try:
                            import zipfile, io
                            inv_root = os.path.join(data_dir, "invoices")
                            mdir = os.path.join(inv_root, month_key)
                            if not os.path.isdir(mdir):
                                result = {"ok": False, "error": f"Keine Rechnungen für {month_key}"}
                            else:
                                # ZIP im Memory bauen
                                zip_buf = io.BytesIO()
                                count = 0
                                with zipfile.ZipFile(zip_buf, "w", zipfile.ZIP_DEFLATED) as zf:
                                    for fname in sorted(os.listdir(mdir)):
                                        if not fname.endswith(".json"):
                                            continue
                                        try:
                                            with open(os.path.join(mdir, fname), encoding="utf-8") as f:
                                                inv = json.load(f)
                                            if inv.get("storno"):
                                                continue  # Stornierte überspringen
                                            pdf_bytes = generate_invoice_pdf(inv)
                                            pdf_name = inv.get("number", fname.replace(".json", "")) + ".pdf"
                                            zf.writestr(pdf_name, pdf_bytes)
                                            count += 1
                                        except Exception as e:
                                            print(f"[INVOICES/ZIP] Fehler bei {fname}: {e}")
                                            continue
                                if count == 0:
                                    result = {"ok": False, "error": "Keine gültigen Rechnungen im Monat"}
                                else:
                                    # Direkt als binäre Antwort senden (nicht als JSON)
                                    zip_bytes = zip_buf.getvalue()
                                    self.send_response(200)
                                    self.send_header("Content-Type", "application/zip")
                                    self.send_header("Content-Disposition", f'attachment; filename="rechnungen-{month_key}.zip"')
                                    self.send_header("Content-Length", str(len(zip_bytes)))
                                    self.end_headers()
                                    self.wfile.write(zip_bytes)
                                    ts = datetime.now().strftime("%H:%M:%S")
                                    print(f"[{ts}] INVOICES/ZIP | {month_key}: {count} Rechnungen, {len(zip_bytes)/1024:.1f} KB")
                                    return  # Wichtig: nicht in den Standard-JSON-Fluss fallen
                        except Exception as e:
                            import traceback
                            result = {"ok": False, "error": str(e), "trace": traceback.format_exc()}

                elif action == "operator/save":
                    # Betreiber-Stammdaten speichern (Briefkopf, Rechnungs-Prefix)
                    op_file = os.path.join(data_dir, "operator.json")
                    write_json_atomic(op_file, payload)
                    result = {"ok": True}
                    ts = datetime.now().strftime("%H:%M:%S")
                    print(f"[{ts}] BETREIBER | Stammdaten gespeichert: {payload.get('firma','')}")

                elif action == "operator/load":
                    # Betreiber-Stammdaten laden
                    op_file = os.path.join(data_dir, "operator.json")
                    if os.path.exists(op_file):
                        with open(op_file, encoding="utf-8") as f:
                            result = {"ok": True, "data": json.load(f)}
                    else:
                        result = {"ok": True, "data": {}}

                elif action == "machines/master/save":
                    # Maschinen-Stammdaten speichern: Kaufdatum, Neupreis, Nutzungsdauer
                    # payload: { "A": {"kaufdatum": "2021-03-15", "neupreis": 4500, "nutzungsdauer_jahre": 10}, ... }
                    mm_file = os.path.join(data_dir, "machine_master.json")
                    write_json_atomic(mm_file, payload)
                    result = {"ok": True}
                    ts = datetime.now().strftime("%H:%M:%S")
                    print(f"[{ts}] MASCHINEN | Stammdaten gespeichert: {len(payload)} Einträge")

                elif action == "machines/master/load":
                    # Maschinen-Stammdaten laden
                    mm_file = os.path.join(data_dir, "machine_master.json")
                    if os.path.exists(mm_file):
                        with open(mm_file, encoding="utf-8") as f:
                            result = {"ok": True, "data": json.load(f)}
                    else:
                        result = {"ok": True, "data": {}}

                elif action == "machines/lifetime-stats":
                    # #18 Lifetime-ROI: aggregierter Umsatz + Zyklen pro Maschinen-Label
                    # aus allen verfügbaren Transaktionen (komplette Historie).
                    # Berücksichtigt daily_cache (bis HISTORY_YEARS zurück).
                    try:
                        # daily_cache ist eine Datei? Nein — wird in stats.json geschrieben.
                        # Wir nutzen die bereits aggregierten Daten aus stats.json wenn vorhanden.
                        stats_file = os.path.join(data_dir, "stats.json")
                        by_label = {}
                        total_cycles_all = 0
                        total_rev_all = 0.0
                        oldest_date = None
                        newest_date = None
                        if os.path.exists(stats_file):
                            with open(stats_file, encoding="utf-8") as f:
                                stats_json = json.load(f)
                            cache = stats_json.get("daily_cache") or []
                            for day in cache:
                                date = day.get("date")
                                if not date:
                                    continue
                                if oldest_date is None or date < oldest_date:
                                    oldest_date = date
                                if newest_date is None or date > newest_date:
                                    newest_date = date
                                for tx in (day.get("transactions") or []):
                                    if not tx.get("is_sale"):
                                        continue
                                    label = tx.get("device_label") or "?"
                                    model = tx.get("device_model") or ""
                                    key = label
                                    if key not in by_label:
                                        by_label[key] = {
                                            "label": label,
                                            "model": model,
                                            "type": tx.get("device_type") or 0,
                                            "umsatz": 0.0,
                                            "zyklen": 0,
                                        }
                                    by_label[key]["umsatz"] += tx.get("value_eur") or 0
                                    by_label[key]["zyklen"] += 1
                                    total_cycles_all += 1
                                    total_rev_all += tx.get("value_eur") or 0
                        # Runden
                        for k in by_label:
                            by_label[k]["umsatz"] = round(by_label[k]["umsatz"], 2)
                        result = {
                            "ok": True,
                            "by_label": by_label,
                            "period": {
                                "oldest": oldest_date,
                                "newest": newest_date,
                            },
                            "totals": {
                                "cycles": total_cycles_all,
                                "revenue": round(total_rev_all, 2),
                            },
                        }
                    except Exception as e:
                        result = {"ok": False, "error": str(e)}

                elif action == "happyhour/save":
                    # Happy Hour Konfiguration speichern
                    hh_file = os.path.join(data_dir, "happyhour.json")
                    write_json_atomic(hh_file, payload)
                    result = {"ok": True}
                    ts = datetime.now().strftime("%H:%M:%S")
                    status = "AKTIV" if payload.get("active") else "DEAKTIVIERT"
                    print(f"[{ts}] HAPPY HOUR | {status}: {payload.get('days',[])} {payload.get('start','')}-{payload.get('end','')} {payload.get('discount',0)}%")

                elif action == "happyhour/load":
                    # Happy Hour Konfiguration laden
                    hh_file = os.path.join(data_dir, "happyhour.json")
                    if os.path.exists(hh_file):
                        with open(hh_file, encoding="utf-8") as f:
                            result = {"ok": True, "data": json.load(f)}
                    else:
                        result = {"ok": True, "data": {}}

                self.send_response(200)
                self.send_header("Content-Type", "application/json")
                self.send_header("Access-Control-Allow-Origin", "*")
                self.end_headers()
                self.wfile.write(json.dumps(result).encode())

            except Exception as e:
                self.send_response(500)
                self.send_header("Content-Type", "application/json")
                self.end_headers()
                self.wfile.write(json.dumps({"ok": False, "error": str(e)}).encode())
            return

        super().do_POST()

    def do_OPTIONS(self):
        """Answer CORS preflight requests so browser clients may POST to /api."""
        self.send_response(200)
        for header, value in (
            ("Access-Control-Allow-Origin", "*"),
            ("Access-Control-Allow-Methods", "POST, OPTIONS"),
            ("Access-Control-Allow-Headers", "Content-Type"),
        ):
            self.send_header(header, value)
        self.end_headers()


def main():
    """Entry point: start every poller thread, then serve the dashboard.

    Sequence:
      1. Switch CWD to the project root so the HTTP server serves washos/.
      2. Load Bloomest credentials; on successful login start the live- and
         stats-poller threads for the selected laundry.
      3. Optionally start the Miele, Seko and weather poller threads
         (each is a separate sibling module imported by file name).
      4. Start the Happy Hour watcher and — if the push libraries are
         installed — the daily-digest scheduler.
      5. Run the blocking HTTP server until Ctrl+C.

    All worker threads are daemons, so Ctrl+C on the server loop ends the
    whole process.
    """
    global LAUNDRY_ID
    # Hoisted: the three optional pollers below all need importlib, and the
    # module file names contain a dash, so a plain `import` statement
    # cannot load them.
    import importlib

    print("=" * 58)
    print("  WashOS - Server + Poller v5")
    print("  Bloomest + Miele Move + Seko Dosierung")
    print(f"  http://localhost:{PORT}/frontend/washos-dashboard.html")
    print(f"  Historie: letzte {HISTORY_YEARS} Jahre")
    print("=" * 58)
    # HTTP root = washos/ (parent directory of server/)
    root_dir = os.path.dirname(script_dir)
    os.chdir(root_dir)

    # Load Bloomest credentials from settings.json (must happen before login()).
    bloomest_ready = load_bloomest_credentials(data_dir)

    if not bloomest_ready:
        print()
        print("  [!] Bloomest: Keine Credentials in settings.json")
        print("  [!] Bitte in den Einstellungen eintragen. Bloomest-Poller pausiert.")
        print()
    elif not login():
        print()
        print("  [!] Bloomest: Login fehlgeschlagen.")
        print("  [!] Credentials pruefen. Bloomest-Poller pausiert.")
        print()
    else:
        laundries = api_get("laundries") or []
        if not laundries:
            # Guard: the original code indexed laundries[0] unconditionally,
            # which raised IndexError on an empty API response.
            print()
            print("  [!] Bloomest: Keine Waschsalons gefunden. Bloomest-Poller pausiert.")
            print()
        else:
            # Prefer the laundry matching our LCS identifier, else the first one.
            laundry = next((l for l in laundries
                            if l.get("lavapiuLaundryIdentifier") == ID_LCS),
                           laundries[0])
            # Verify/update the module-level LAUNDRY_ID from the API response.
            actual_id = laundry.get("idLaundry")
            if actual_id and actual_id != LAUNDRY_ID:
                print(f"  LAUNDRY_ID aktualisiert: {LAUNDRY_ID} → {actual_id}")
                LAUNDRY_ID = actual_id
            print(f"  {laundry.get('description')} – {laundry.get('address')}, {laundry.get('city')}")
            print(f"  idLaundry: {LAUNDRY_ID}")
            print()

            diagnose_stats()
            threading.Thread(target=live_poller, args=(laundry,), daemon=True).start()
            threading.Thread(target=stats_poller, args=(laundry,), daemon=True).start()

    # Miele Move poller (machine data -> miele.json)
    if MIELE_ENABLED:
        print("Miele Move Poller wird gestartet...")
        try:
            miele_mod = importlib.import_module("washos-miele")
            miele_mod.start_miele_thread(output_dir=data_dir)
            print("  Miele-Thread gestartet.\n")
        except ImportError as e:
            print(f"  washos-miele.py nicht gefunden — Miele-Daten deaktiviert. ({e})\n")
        except Exception as e:
            print(f"  Miele-Fehler: {e}\n")

    # Seko dosing-pump poller
    if SEKO_ENABLED:
        print("Seko Dosierpumpen-Poller wird gestartet...")
        try:
            seko_mod = importlib.import_module("washos-seko")
            seko_mod.start_seko_thread(output_dir=data_dir)
            print("  Seko-Thread gestartet.\n")
        except ImportError as e:
            print(f"  washos-seko.py nicht gefunden — Seko-Daten deaktiviert. ({e})\n")
        except Exception as e:
            print(f"  Seko-Fehler: {e}\n")

    # Weather poller (Open-Meteo, Lüneburg)
    if WEATHER_ENABLED:
        print("Open-Meteo Wetter-Poller wird gestartet...")
        try:
            weather_mod = importlib.import_module("washos-weather")
            weather_mod.start_weather_thread(output_dir=data_dir)
            print("  Weather-Thread gestartet.\n")
        except ImportError as e:
            print(f"  washos-weather.py nicht gefunden — Wetter-Daten deaktiviert. ({e})\n")
        except Exception as e:
            print(f"  Weather-Fehler: {e}\n")

    # Happy Hour timer
    def happy_hour_checker():
        """Every 60s: check whether a Happy Hour window is active and log
        START/ENDE transitions.

        Actual price changes (PUT /laundries) are not implemented yet —
        this thread only tracks state and logs.
        """
        hh_active_state = False  # True while a Happy Hour window is running
        while True:
            try:
                hh_file = os.path.join(data_dir, "happyhour.json")
                if os.path.exists(hh_file):
                    with open(hh_file, encoding="utf-8") as f:
                        hh = json.load(f)
                    if hh.get("active"):
                        now = datetime.now()
                        # Python weekday: 0=Mon..6=Sun; Bloomest: 0=Sun..6=Sat
                        bloomest_day = (now.weekday() + 1) % 7
                        current_time = now.strftime("%H:%M")
                        # NOTE(review): lexicographic "HH:MM" comparison is
                        # correct for same-day windows but cannot express a
                        # window that crosses midnight (start > end).
                        in_window = (bloomest_day in hh.get("days", [])
                                     and hh.get("start", "") <= current_time < hh.get("end", ""))

                        if in_window and not hh_active_state:
                            ts = now.strftime("%H:%M:%S")
                            print(f"[{ts}] HAPPY HOUR | START — {hh.get('discount', 20)}% Rabatt")
                            hh_active_state = True
                        elif not in_window and hh_active_state:
                            ts = now.strftime("%H:%M:%S")
                            print(f"[{ts}] HAPPY HOUR | ENDE — Preise zurückgesetzt")
                            hh_active_state = False
            except Exception as e:
                # Best effort: never let the watcher thread die, but do log
                # instead of silently swallowing (was `pass`).
                print(f"[Happy Hour] Fehler: {e}")
            time.sleep(60)

    threading.Thread(target=happy_hour_checker, daemon=True).start()
    print("Happy Hour Timer gestartet.\n")

    # Daily-digest scheduler (v1.0: push + daily digest)
    if _PUSH_LIB_AVAILABLE:
        get_vapid_keys()  # initialise / load VAPID keys
        threading.Thread(target=digest_scheduler_loop, daemon=True).start()
        print("Daily-Digest-Scheduler gestartet.\n")
    else:
        print("Push/Digest nicht verfügbar — installiere pywebpush py-vapid für volle v1.0-Features.\n")

    def open_browser():
        # Give the HTTP server a moment to come up before opening the UI.
        time.sleep(3)
        webbrowser.open(f"http://localhost:{PORT}/frontend/washos-dashboard.html")
    threading.Thread(target=open_browser, daemon=True).start()

    print(f"Webserver Port {PORT}... Ctrl+C zum Stoppen.\n")
    try:
        HTTPServer(("", PORT), DashboardHandler).serve_forever()
    except KeyboardInterrupt:
        print("\nServer gestoppt.")
    except OSError as e:
        # WinError 10048 = "address already in use" on Windows; the lowercase
        # substring match also covers the POSIX errno message.
        if "10048" in str(e) or "already in use" in str(e).lower():
            print(f"Port {PORT} belegt – Task-Manager > python.exe beenden.")
            input("Enter...")
        else:
            raise


# Script entry point: run the server only when executed directly, not on import.
if __name__ == "__main__":
    main()