- smart-home/HEIZUNG.md: komplette Doku zur Ölbrenner-Erkennung (brennerstarts.py), Schwellwerte, Rekonstruktion, Dashboard-Panels, Troubleshooting - smart-home/STATE.md: klare Tabelle mit allen Dienst-URLs (public+intern) und Logins — Grafana/ioBroker/InfluxDB laufen ALLE in CT 143 auf pve-mu-3 - homelab.conf: CT_143_MU3 Beschreibung korrigiert (war "Raspi-Broker"), neue Variablen GRAFANA_URL_*/IOBROKER_URL_*/INFLUX_URL_INTERN + User/Pass (=PW_DEFAULT) damit beim nächsten Mal keine Fragen aufkommen - smart-home/scripts/: alle relevanten Skripte ins Repo: grafana_shot.js (Puppeteer-Login mit admin/astral66) add_month_panel.py (idempotente Monatskacheln im Heizung-Dashboard) brenner_rekonstruktion.py + cleanup_reconstruct.py + check_april.py patch_brenner.sh (Anpassung der Schwellwerte nach Regelkurven-Änderung) - MASTER_INDEX.md: Verweis auf HEIZUNG.md Made-with: Cursor
215 lines
7.4 KiB
Python
215 lines
7.4 KiB
Python
#!/usr/bin/env python3
"""
Reconstructs brennerstarts/brennerstatus/brennerlaufzeit (burner starts /
burner status / burner runtime) from raw flow-temperature (VL) data for the
period in which the live detection captured nothing because its threshold
was too high (55 C).

Uses the CURRENT thresholds (MIN_TEMP=30, slope 0.3/3min, 0.1/1min,
-0.15/3min OFF, cooldown 10 min). Writes with historical timestamps.

Dry-run by default. Pass --commit to actually write.
"""
|
|
import argparse
|
|
import json
|
|
from datetime import datetime, timedelta, timezone
|
|
from urllib.parse import quote
|
|
from urllib.request import Request, urlopen
|
|
|
|
# InfluxDB 1.x HTTP endpoint and target database (local instance).
INFLUX = 'http://localhost:8086'
DB = 'iobroker'
# Measurement holding the raw boiler flow-temperature (Vorlauf) samples.
VL_MEASUREMENT = 'mqtt.0.Oelkessel.Oelkessel_VL.Vorlauf'

# Detection thresholds — per the module docstring these are the CURRENT
# values used by the live detection; keep them in sync.
MIN_TEMP_BRENNER = 30    # deg C: below this the burner is never considered ON
STEIGUNG_AN = 0.3        # deg C rise over 3 min required for ON
STEIGUNG_1MIN = 0.1      # deg C rise over 1 min additionally required for ON
STEIGUNG_AUS = -0.15     # deg C change over 3 min at/below which OFF fires
COOLDOWN_SEC = 10 * 60   # minimum seconds between two detected starts
BRENNER_RATE_LH = 1.89   # oil consumption in litres per hour of runtime

# Live service started 2026-04-20 21:45 CEST; before that, detection was dead
# from 06 Apr around noon (last OFF was 2026-04-06 03:24 UTC = 05:24 CEST).
START_UTC = datetime(2026, 4, 6, 4, 0, tzinfo=timezone.utc)  # 06:00 CEST
END_UTC = datetime(2026, 4, 20, 19, 44, tzinfo=timezone.utc)  # 21:44 CEST

# Nominal step width in seconds (used by the time-stepped iteration approach).
STEP_SEC = 30
|
|
|
|
|
|
def influx_query(q):
    """Run a raw InfluxQL query against DB and return the decoded JSON body.

    Timestamps are requested in nanosecond epoch (epoch=ns).
    """
    endpoint = f'{INFLUX}/query?db={DB}&epoch=ns&q={quote(q)}'
    with urlopen(endpoint, timeout=60) as resp:
        payload = resp.read().decode()
    return json.loads(payload)
|
|
|
|
|
|
def fetch_vl(start_utc, end_utc):
    """Fetch all VL samples in [start_utc, end_utc] as (ts_ns, value) tuples.

    Null values are dropped; the list is ordered by ascending time.
    Returns an empty list when the query matched nothing.
    """
    fmt = '%Y-%m-%dT%H:%M:%SZ'
    query = (
        f'SELECT value FROM "{VL_MEASUREMENT}" '
        f"WHERE time >= '{start_utc.strftime(fmt)}' "
        f"AND time <= '{end_utc.strftime(fmt)}' "
        f'ORDER BY time ASC'
    )
    result = influx_query(query)
    series_list = result['results'][0].get('series', [])
    if not series_list:
        return []
    rows = series_list[0]['values']
    return [(int(ts), float(val)) for ts, val in rows if val is not None]
|
|
|
|
|
|
def temp_at_or_before(samples, i, target_ns):
    """Walk backwards from index *i* to the last sample at or before
    *target_ns* and return its temperature.

    Returns None when even the earliest sample lies after *target_ns*.
    """
    idx = i
    while idx and samples[idx][0] > target_ns:
        idx -= 1
    ts, value = samples[idx]
    if ts <= target_ns:
        return value
    return None
|
|
|
|
|
|
def reconstruct(samples, *, min_temp=None, slope_on_3m=None, slope_on_1m=None,
                slope_off_3m=None, cooldown_s=None):
    """Reconstruct burner ON/OFF events from raw VL temperature samples.

    samples: ascending list of (ts_ns, temperature) tuples.

    The keyword-only thresholds default to the module-level constants, so
    existing callers (``reconstruct(samples)``) behave exactly as before;
    passing explicit values allows reuse with other detection curves.

    Returns a list of events [(ts_ns, 'an'|'aus', laufzeit_s_at_aus), ...]
    where the runtime in seconds is only present on 'aus' events.
    """
    # Resolve defaults lazily so the signature stays decoupled from the
    # module constants' definition order.
    if min_temp is None:
        min_temp = MIN_TEMP_BRENNER
    if slope_on_3m is None:
        slope_on_3m = STEIGUNG_AN
    if slope_on_1m is None:
        slope_on_1m = STEIGUNG_1MIN
    if slope_off_3m is None:
        slope_off_3m = STEIGUNG_AUS
    if cooldown_s is None:
        cooldown_s = COOLDOWN_SEC

    events = []
    brenner_laeuft = False
    start_ts_ns = None
    last_start_ns = None

    if not samples:
        return events

    three_min = 3 * 60 * 1_000_000_000
    one_min = 1 * 60 * 1_000_000_000
    two_min = 2 * 60 * 1_000_000_000
    cooldown = cooldown_s * 1_000_000_000

    # Per-sample iteration: at every sample i, evaluate the temperature slope
    # over the trailing 3-minute and 1-minute windows.
    for i in range(len(samples)):
        ts_now, temp_now = samples[i]
        target_3m = ts_now - three_min
        target_1m = ts_now - one_min
        # Find the last sample at or before each window start (samples sorted).
        j3 = i
        while j3 > 0 and samples[j3][0] > target_3m:
            j3 -= 1
        j1 = i
        while j1 > 0 and samples[j1][0] > target_1m:
            j1 -= 1
        temp_vor_3m = samples[j3][1]
        temp_vor_1m = samples[j1][1]
        # Require at least 2 minutes of history; otherwise skip this sample.
        if ts_now - samples[j3][0] < two_min:
            continue

        steigung_3m = temp_now - temp_vor_3m
        steigung_1m = temp_now - temp_vor_1m

        if not brenner_laeuft:
            # ON: warm enough AND rising on both windows, and outside the
            # cooldown after the previous detected start.
            if (
                temp_now > min_temp
                and steigung_3m >= slope_on_3m
                and steigung_1m >= slope_on_1m
            ):
                if last_start_ns is None or (ts_now - last_start_ns) > cooldown:
                    brenner_laeuft = True
                    start_ts_ns = ts_now
                    last_start_ns = ts_now
                    events.append((ts_now, 'an', None))
        else:
            # OFF: clear temperature drop over the 3-minute window.
            if steigung_3m <= slope_off_3m:
                laufzeit_s = (ts_now - start_ts_ns) / 1_000_000_000
                events.append((ts_now, 'aus', laufzeit_s))
                brenner_laeuft = False
                start_ts_ns = None
    return events
|
|
|
|
|
|
def write_line(line, dry=True):
    """POST one line-protocol record to InfluxDB.

    In dry mode (the default) nothing is sent and True is returned.
    Otherwise returns True iff the server answered 204 No Content.
    """
    if dry:
        return True
    endpoint = f'{INFLUX}/write?db={DB}&precision=ns'
    request = Request(endpoint, data=line.encode(), method='POST')
    with urlopen(request, timeout=30) as response:
        ok = response.status == 204
    return ok
|
|
|
|
|
|
def main():
    """CLI entry point: fetch VL data, reconstruct burner events, print a
    report (event list, daily totals), and optionally write to InfluxDB.

    Dry-run unless --commit is given. --start/--end accept ISO timestamps
    and default to the reconstruction window constants.
    """
    ap = argparse.ArgumentParser()
    ap.add_argument('--commit', action='store_true')
    ap.add_argument('--start', default=START_UTC.isoformat())
    ap.add_argument('--end', default=END_UTC.isoformat())
    args = ap.parse_args()

    start = datetime.fromisoformat(args.start)
    end = datetime.fromisoformat(args.end)

    print(f'Fetch VL {start} -> {end}')
    samples = fetch_vl(start, end)
    if not samples:
        # Guard: without samples the stats below would raise IndexError.
        print('  keine Samples im Zeitraum, Abbruch')
        return
    print(f' {len(samples)} Samples, erster {datetime.fromtimestamp(samples[0][0]/1e9, timezone.utc)}, letzter {datetime.fromtimestamp(samples[-1][0]/1e9, timezone.utc)}')
    print(f' Min {min(v for _,v in samples):.1f} C, Max {max(v for _,v in samples):.1f} C')

    events = reconstruct(samples)
    print(f'\nErkannte Events: {len(events)}')
    ans = [e for e in events if e[1] == 'an']
    auss = [e for e in events if e[1] == 'aus']
    print(f' {len(ans)} Starts, {len(auss)} Stopps')
    total_s = sum(e[2] for e in auss)
    print(f' Gesamt-Laufzeit: {total_s/3600:.2f} h -> {total_s/3600*BRENNER_RATE_LH:.2f} L')

    # Show the first/last few events of each kind as a plausibility check.
    for label, lst in (('erste 5 Starts', ans[:5]), ('letzte 5 Starts', ans[-5:]),
                       ('erste 5 Stopps', auss[:5]), ('letzte 5 Stopps', auss[-5:])):
        print(f'\n{label}:')
        for e in lst:
            ts = datetime.fromtimestamp(e[0]/1e9, timezone.utc).astimezone()
            if e[1] == 'aus':
                print(f' {ts.strftime("%Y-%m-%d %H:%M:%S %z")} AUS {e[2]/60:.1f} min')
            else:
                print(f' {ts.strftime("%Y-%m-%d %H:%M:%S %z")} AN')

    # Daily totals: starts and runtime per local day.
    print('\nTagesbilanz:')
    per_day = {}
    # Local day uses a fixed UTC+2 offset (Europe/Berlin is CEST in April).
    TZ = timezone(timedelta(hours=2))
    for ts_ns, typ, laufzeit in events:
        d = datetime.fromtimestamp(ts_ns/1e9, TZ).date()
        if d not in per_day:
            per_day[d] = {'starts': 0, 'laufzeit_s': 0.0}
        if typ == 'an':
            per_day[d]['starts'] += 1
        elif typ == 'aus':
            per_day[d]['laufzeit_s'] += laufzeit
    for d in sorted(per_day):
        s = per_day[d]
        h = s['laufzeit_s'] / 3600
        print(f' {d} Starts={s["starts"]:3d} Laufzeit={h:5.2f}h Verbrauch={h*BRENNER_RATE_LH:5.2f}L')

    # Write phase: one brennerstarts/brennerstatus point per ON event, one
    # brennerlaufzeit/brennerstatus point per OFF event, historical timestamps.
    if args.commit:
        print('\n--- commit: schreibe nach InfluxDB ---')
        n = 0
        for ts_ns, typ, laufzeit in events:
            if typ == 'an':
                write_line(f'brennerstarts value=1 {ts_ns}', dry=False)
                write_line(f'brennerstatus value=1 {ts_ns}', dry=False)
                n += 2
            elif typ == 'aus':
                write_line(f'brennerlaufzeit value={laufzeit} {ts_ns}', dry=False)
                write_line(f'brennerstatus value=0 {ts_ns}', dry=False)
                n += 2
        print(f' {n} Zeilen geschrieben')
    else:
        print('\n(dry-run, nichts geschrieben; mit --commit ausfuehren)')
|
|
|
|
|
|
# Run the CLI only when executed directly, not on import.
if __name__ == '__main__':
    main()
|