feat: sichtbarer Fortschritt + echter Abbruch für lange Suchen

- Alle 30 s eine Statusmeldung während der laufenden Suche
- Textbefehl "abbruch"/"stop" bricht den laufenden Such-Task ab
- Aufräumen via Task-Registry pro Chat
- Sauberes Cancel-Handling ohne Tracebacks
This commit is contained in:
Homelab Cursor 2026-03-20 22:35:49 +01:00
parent 8e69189781
commit d189f8adac

View file

@ -117,6 +117,7 @@ log = logging.getLogger("hausmeister")
ALLOWED_CHAT_IDS: set[int] = set()
CHAT_ID: int | None = None
ACTIVE_LLM_TASKS: dict[int, asyncio.Task] = {}
def _load_token_and_chat():
@ -167,6 +168,9 @@ async def cmd_status(update: Update, ctx: ContextTypes.DEFAULT_TYPE):
if len(text) > 4000:
text = text[:4000] + "\n..."
await update.message.reply_text(text)
except asyncio.CancelledError:
log.info("Freitext-Lauf abgebrochen")
return
except Exception as e:
await update.message.reply_text(f"Fehler: {e}")
@ -409,7 +413,20 @@ async def handle_voice(update: Update, ctx: ContextTypes.DEFAULT_TYPE):
context.last_suggest_result = {"type": None}
context.set_source_type("telegram_voice")
handlers = context.get_tool_handlers(session_id=session_id)
answer = await asyncio.to_thread(llm.ask_with_tools, text, handlers, session_id=session_id)
llm_task = asyncio.create_task(
asyncio.to_thread(llm.ask_with_tools, text, handlers, session_id=session_id)
)
ACTIVE_LLM_TASKS[update.effective_chat.id] = llm_task
waited = 0
while not llm_task.done():
await asyncio.sleep(30)
waited += 30
if not llm_task.done():
await update.message.reply_text("⏳ Suche laeuft noch (" + str(waited) + "s)...")
answer = await llm_task
if session_id:
memory_client.log_message(session_id, "user", text)
memory_client.log_message(session_id, "assistant", answer)
@ -711,6 +728,16 @@ async def handle_message(update: Update, ctx: ContextTypes.DEFAULT_TYPE):
if not text:
return
if text.strip().lower() in ("abbruch", "stop", "stopp", "cancel"):
chat_id = update.effective_chat.id
task = ACTIVE_LLM_TASKS.get(chat_id)
if task and not task.done():
task.cancel()
await update.message.reply_text("🛑 Abgebrochen.")
else:
await update.message.reply_text("Kein laufender Suchlauf.")
return
cmd = BUTTON_MAP.get(text)
if cmd == "status":
return await cmd_status(update, ctx)
@ -735,7 +762,20 @@ async def handle_message(update: Update, ctx: ContextTypes.DEFAULT_TYPE):
context.last_suggest_result = {"type": None}
context.set_source_type("telegram_text")
handlers = context.get_tool_handlers(session_id=session_id)
answer = await asyncio.to_thread(llm.ask_with_tools, text, handlers, session_id=session_id)
llm_task = asyncio.create_task(
asyncio.to_thread(llm.ask_with_tools, text, handlers, session_id=session_id)
)
ACTIVE_LLM_TASKS[update.effective_chat.id] = llm_task
waited = 0
while not llm_task.done():
await asyncio.sleep(30)
waited += 30
if not llm_task.done():
await update.message.reply_text("Suche laeuft noch (" + str(waited) + "s)...")
answer = await llm_task
if session_id:
memory_client.log_message(session_id, "user", text)
memory_client.log_message(session_id, "assistant", answer)
@ -744,9 +784,14 @@ async def handle_message(update: Update, ctx: ContextTypes.DEFAULT_TYPE):
log.info("suggest_result: type=%s", suggest.get("type"))
await update.message.reply_text(answer[:4000], reply_markup=KEYBOARD)
except asyncio.CancelledError:
log.info("Freitext-Lauf abgebrochen")
return
except Exception as e:
log.exception("Fehler bei Freitext")
await update.message.reply_text(f"Fehler: {e}")
finally:
ACTIVE_LLM_TASKS.pop(update.effective_chat.id, None)
async def handle_callback(update: Update, ctx: ContextTypes.DEFAULT_TYPE):