From 622f42da5fc64f5668d6f2eec6086389b027f188 Mon Sep 17 00:00:00 2001
From: root
Date: Sun, 15 Mar 2026 11:34:48 +0700
Subject: [PATCH] Hausmeister: Konversations-History, memory_suggest/read/search Tools, Events-Integration

---
 homelab-ai-bot/context.py       | 45 ++++++++++++++++++++++++
 homelab-ai-bot/llm.py           | 61 +++++++++++++++++++++++++++++++--
 homelab-ai-bot/memory_client.py | 10 ++++++
 homelab-ai-bot/telegram_bot.py  |  2 +-
 4 files changed, 115 insertions(+), 3 deletions(-)

diff --git a/homelab-ai-bot/context.py b/homelab-ai-bot/context.py
index 760124bd..7d7741a3 100644
--- a/homelab-ai-bot/context.py
+++ b/homelab-ai-bot/context.py
@@ -281,6 +281,48 @@ def _tool_get_feed_stats() -> str:
         return f"RSS Manager Fehler: {e}"
 
 
+def _tool_memory_read(scope=""):
+    import memory_client
+    items = memory_client.get_active_memory()
+    if scope:
+        items = [i for i in items if i.get("scope") == scope]
+    if not items:
+        return "Keine Memory-Eintraege gefunden."
+    lines = []
+    for i in items:
+        lines.append(f"[{i['scope']}/{i['kind']}] {i['content']}")
+    return "\n".join(lines)
+
+
+def _tool_memory_suggest(scope, kind, content):
+    import memory_client
+    result = memory_client._post("/memory", {
+        "scope": scope,
+        "kind": kind,
+        "content": content,
+        "source": "bot-suggest",
+        "status": "candidate",
+    })
+    if result and result.get("duplicate"):
+        return f"Bereits gespeichert (ID {result.get('existing_id')})."
+    if result and result.get("ok"):
+        return f"Vorschlag gespeichert als Kandidat (Fingerprint: {result.get('fingerprint', '?')[:12]}...)."
+    return "Konnte Vorschlag nicht speichern."
+
+
+def _tool_session_search(query):
+    import memory_client
+    result = memory_client._get("/sessions/search", {"q": query, "limit": 20})
+    if not result or not result.get("results"):
+        return f"Keine Ergebnisse fuer '{query}'."
+    lines = [f"Suche '{query}': {result['count']} Treffer"]
+    for r in result["results"][:10]:
+        role = r.get("role", "?")
+        content = (r.get("content") or "")[:150]
+        lines.append(f"  [{role}] {content}")
+    return "\n".join(lines)
+
+
 def get_tool_handlers() -> dict:
     """Registry: Tool-Name -> Handler-Funktion. Wird von llm.ask_with_tools() genutzt."""
     return {
@@ -304,4 +346,7 @@ def get_tool_handlers() -> dict:
         "search_mail": lambda query, days=30: _tool_search_mail(query, days=days),
         "get_todays_mails": lambda: _tool_get_todays_mails(),
         "get_smart_mail_digest": lambda hours=24: _tool_get_smart_mail_digest(hours=hours),
+        "memory_read": lambda scope="": _tool_memory_read(scope),
+        "memory_suggest": lambda scope, kind, content: _tool_memory_suggest(scope, kind, content),
+        "session_search": lambda query: _tool_session_search(query),
     }
diff --git a/homelab-ai-bot/llm.py b/homelab-ai-bot/llm.py
index d6e389ea..be1692fb 100644
--- a/homelab-ai-bot/llm.py
+++ b/homelab-ai-bot/llm.py
@@ -242,6 +242,50 @@ TOOLS = [
             },
         },
     },
+    {
+        "type": "function",
+        "function": {
+            "name": "memory_read",
+            "description": "Liest persistente Gedaechtnis-Eintraege (Fakten ueber User, Umgebung, Projekte). Nutze dieses Tool wenn du wissen willst was du dir gemerkt hast.",
+            "parameters": {
+                "type": "object",
+                "properties": {
+                    "scope": {"type": "string", "description": "Filter: user, environment, project (leer = alle)", "default": ""},
+                },
+                "required": [],
+            },
+        },
+    },
+    {
+        "type": "function",
+        "function": {
+            "name": "memory_suggest",
+            "description": "Schlage vor, einen neuen Fakt zu merken. Nutze dieses Tool PROAKTIV wenn der User etwas sagt das dauerhaft relevant ist (Vorlieben, Gewohnheiten, Umgebungsfakten). Der Vorschlag wird als Kandidat gespeichert.",
+            "parameters": {
+                "type": "object",
+                "properties": {
+                    "scope": {"type": "string", "enum": ["user", "environment", "project"], "description": "Kategorie des Fakts"},
+                    "kind": {"type": "string", "enum": ["fact", "preference", "rule", "note"], "description": "Art des Eintrags"},
+                    "content": {"type": "string", "description": "Der Fakt der gemerkt werden soll (kurz, praezise)"},
+                },
+                "required": ["scope", "kind", "content"],
+            },
+        },
+    },
+    {
+        "type": "function",
+        "function": {
+            "name": "session_search",
+            "description": "Durchsucht vergangene Gespraeche nach Stichworten. Nutze dieses Tool wenn der User fragt 'was haben wir besprochen', 'erinnerst du dich', 'letzte Woche' oder aehnlich.",
+            "parameters": {
+                "type": "object",
+                "properties": {
+                    "query": {"type": "string", "description": "Suchbegriffe (Woerter mit Leerzeichen getrennt)"},
+                },
+                "required": ["query"],
+            },
+        },
+    },
 ]
 
 
@@ -287,10 +331,11 @@ def ask(question: str, context: str) -> str:
         return f"LLM-Fehler: {e}"
 
 
-def ask_with_tools(question: str, tool_handlers: dict) -> str:
+def ask_with_tools(question: str, tool_handlers: dict, session_id: str = None) -> str:
     """Freitext-Frage mit automatischem Tool-Calling.
 
     tool_handlers: dict von tool_name -> callable(**kwargs) -> str
+    session_id: aktive Session fuer Konversations-History
     """
     api_key = _get_api_key()
     if not api_key:
@@ -305,9 +350,21 @@
 
     messages = [
         {"role": "system", "content": SYSTEM_PROMPT + memory_block},
-        {"role": "user", "content": question},
     ]
 
+    if session_id:
+        try:
+            import memory_client
+            history = memory_client.get_session_messages(session_id, limit=10)
+            for msg in history:
+                if msg.get("role") in ("user", "assistant") and msg.get("content"):
+                    messages.append({"role": msg["role"], "content": msg["content"]})
+        except Exception:
+            pass
+
+    if not any(m.get("content") == question for m in messages):
+        messages.append({"role": "user", "content": question})
+
     try:
         for _round in range(MAX_TOOL_ROUNDS):
             data = _call_openrouter(messages, api_key, use_tools=True)
diff --git a/homelab-ai-bot/memory_client.py b/homelab-ai-bot/memory_client.py
index 5e558d93..d9316b89 100644
--- a/homelab-ai-bot/memory_client.py
+++ b/homelab-ai-bot/memory_client.py
@@ -111,3 +111,13 @@ def format_memory_for_prompt(items: list[dict]) -> str:
         lines.append(f"{prefix} {item['content']}")
     lines.append("=== ENDE GEDAECHTNIS ===")
     return "\n".join(lines)
+
+
+def get_session_messages(session_id: str, limit: int = 10) -> list[dict]:
+    """Holt die letzten N Messages einer Session fuer den LLM-Kontext."""
+    if not session_id:
+        return []
+    result = _get(f"/sessions/{session_id}/messages", {"limit": limit})
+    if result and "messages" in result:
+        return result["messages"]
+    return []
diff --git a/homelab-ai-bot/telegram_bot.py b/homelab-ai-bot/telegram_bot.py
index 09078370..7387468b 100644
--- a/homelab-ai-bot/telegram_bot.py
+++ b/homelab-ai-bot/telegram_bot.py
@@ -314,7 +314,7 @@ async def handle_message(update: Update, ctx: ContextTypes.DEFAULT_TYPE):
     await update.message.reply_text("🤔 Denke nach...")
     try:
         handlers = context.get_tool_handlers()
-        answer = llm.ask_with_tools(text, handlers)
+        answer = llm.ask_with_tools(text, handlers, session_id=session_id)
        if session_id:
             memory_client.log_message(session_id, "assistant", answer)
         await update.message.reply_text(answer[:4000], reply_markup=KEYBOARD)