Hausmeister: Session-Recovery nach Restart + robustes Topic-Matching mit Aliassen

This commit is contained in:
root 2026-03-15 12:23:51 +07:00
parent 74ac5a2ce5
commit 5e1b0efefd

View file

@ -22,6 +22,43 @@ _token = None
SESSION_TIMEOUT = 1800 # 30 Minuten Inaktivitaet = neue Session SESSION_TIMEOUT = 1800 # 30 Minuten Inaktivitaet = neue Session
_active_sessions: dict[str, dict] = {} # channel_key -> {id, last_activity} _active_sessions: dict[str, dict] = {} # channel_key -> {id, last_activity}
# Maps a canonical (already-normalized, ASCII lowercase) topic to known
# synonyms/plural forms. Used by _topic_matches for fuzzy topic lookup;
# matching is substring-based, so very short aliases (e.g. "ct") may
# also hit inside longer words — NOTE(review): confirm that is acceptable.
TOPIC_ALIASES = {
    "container": ["containers", "lxc", "lxc-container", "lxc container", "ct"],
    "server": ["servers", "proxmox", "pve", "host", "hosts"],
    "backup": ["backups", "pbs", "snapshot", "snapshots"],
    "projekt": ["projekte", "project", "projects"],
    "mail": ["mails", "email", "e-mail"],
}
def _normalize(text: str) -> str:
"""Normalisiert Text fuer robustes Matching."""
t = text.lower().strip()
for old, new in [("ä", "ae"), ("ö", "oe"), ("ü", "ue"), ("ß", "ss")]:
t = t.replace(old, new)
t = t.replace("-", " ").replace("_", " ")
return t
def _topic_matches(text: str, topic: str) -> bool:
    """Return True if *topic* occurs in the normalized *text* (incl. aliases).

    The topic matches if the normalized topic itself, its canonical base
    topic, or any alias of that base occurs as a substring of the
    normalized text. Fixes the original's duplicated alias scan: the
    direct TOPIC_ALIASES lookup and the base/alias loop tested the same
    aliases twice when the topic was a canonical base, and compared the
    base key unnormalized. All candidates are now collected once into a
    set and tested uniformly.
    """
    norm_text = _normalize(text)
    norm_topic = _normalize(topic)

    # Every string whose presence in the text counts as a match.
    candidates = {norm_topic}
    for base, alias_list in TOPIC_ALIASES.items():
        normalized_aliases = [_normalize(a) for a in alias_list]
        # If the topic names this base (directly or via one of its
        # aliases), the base and all of its aliases become candidates.
        if norm_topic == base or norm_topic in normalized_aliases:
            candidates.add(base)
            candidates.update(normalized_aliases)

    # Substring semantics kept for backward compatibility; short aliases
    # ("ct") can therefore match inside longer words.
    return any(candidate in norm_text for candidate in candidates)
def _ensure_config(): def _ensure_config():
global _cfg, _base_url, _token global _cfg, _base_url, _token
@ -66,14 +103,39 @@ def _get(path: str, params: dict = None) -> Optional[dict]:
return None return None
def _patch(path: str, data: dict) -> Optional[dict]:
    """Send a PATCH request to the memory API.

    Returns the decoded JSON body on success, or None when the API is
    unconfigured, the request fails, or the server answers non-2xx.
    Failures are logged as warnings (best effort, never raises).
    """
    _ensure_config()
    if not _base_url:
        return None
    try:
        resp = requests.patch(
            f"{_base_url}{path}",
            json=data,
            headers=_headers(),
            timeout=5,
        )
        if not resp.ok:
            log.warning("Memory API PATCH %s: %s %s", path, resp.status_code, resp.text[:200])
            return None
        # .json() stays inside the try block so a malformed body is
        # logged instead of propagating, same as the original.
        return resp.json()
    except Exception as e:
        log.warning("Memory API PATCH %s: %s", path, e)
    return None
def get_or_create_session(channel_key: str, source: str = "telegram") -> Optional[str]: def get_or_create_session(channel_key: str, source: str = "telegram") -> Optional[str]:
"""Gibt eine aktive Session-ID zurueck oder erstellt eine neue.""" """Gibt eine aktive Session-ID zurueck oder erstellt eine neue.
Ueberlebt Bot-Restarts durch API-Lookup der letzten Session."""
now = time.time() now = time.time()
cached = _active_sessions.get(channel_key) cached = _active_sessions.get(channel_key)
if cached and (now - cached["last_activity"]) < SESSION_TIMEOUT: if cached and (now - cached["last_activity"]) < SESSION_TIMEOUT:
cached["last_activity"] = now cached["last_activity"] = now
return cached["id"] return cached["id"]
# Nach Restart: letzte Session vom API holen
latest = _get("/sessions/latest", {"channel_key": channel_key})
if latest and latest.get("id"):
last_at = latest.get("last_activity_at", 0)
if (now - last_at) < SESSION_TIMEOUT:
_active_sessions[channel_key] = {"id": latest["id"], "last_activity": now}
_patch(f"/sessions/{latest['id']}", {})
log.info("Session wiederhergestellt nach Restart: %s", latest["id"][:12])
return latest["id"]
result = _post("/sessions", {"source": source, "channel_key": channel_key}) result = _post("/sessions", {"source": source, "channel_key": channel_key})
if result and "id" in result: if result and "id" in result:
_active_sessions[channel_key] = {"id": result["id"], "last_activity": now} _active_sessions[channel_key] = {"id": result["id"], "last_activity": now}
@ -150,29 +212,28 @@ def get_session_summary(session_id: str, limit: int = 20, topic: str = None) ->
return "Keine Themen in dieser Session." return "Keine Themen in dieser Session."
if topic: if topic:
topic_lower = topic.lower()
matching = [] matching = []
other_topics = [] other_topics = []
for q, a in exchanges: for q, a in exchanges:
combined = (q + " " + (a or "")).lower() combined = q + " " + (a or "")
if topic_lower in combined: if _topic_matches(combined, topic):
matching.append((q, a)) matching.append((q, a))
else: else:
other_topics.append(q[:80]) other_topics.append(q[:80])
lines = [] lines = []
if matching: if matching:
lines.append("Zum Thema '" + topic + "' (" + str(len(matching)) + " Punkte):") lines.append("Zum Thema '" + topic + "' (" + str(len(matching)) + " Treffer):")
for i, (q, a) in enumerate(matching, 1): for i, (q, a) in enumerate(matching, 1):
line = str(i) + ". Frage: " + q line = str(i) + ". Frage: " + q
if a: if a:
line += "\n Antwort: " + a line += "\n Antwort: " + a
lines.append(line) lines.append(line)
else: else:
lines.append("Zum Thema '" + topic + "' wurde nichts direkt besprochen.") lines.append("Zum Thema '" + topic + "' wurde in dieser Session nichts direkt besprochen.")
if other_topics: if other_topics:
lines.append("\nSonstige Themen: " + ", ".join(other_topics)) lines.append("\nSonstige Themen der Session: " + ", ".join(other_topics))
return "\n".join(lines) return "\n".join(lines)
lines = ["Session (" + str(len(exchanges)) + " Themen):"] lines = ["Session (" + str(len(exchanges)) + " Themen):"]